Oct 02 14:20:47 crc systemd[1]: Starting Kubernetes Kubelet... Oct 02 14:20:47 crc restorecon[4671]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc 
restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 02 14:20:47 crc 
restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:47 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc 
restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc 
restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 
crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 
14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 
14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 02 14:20:48 crc 
restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 
14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 
14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc 
restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 02 14:20:48 crc restorecon[4671]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 02 14:20:50 crc kubenswrapper[4717]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 02 14:20:50 crc kubenswrapper[4717]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 02 14:20:50 crc kubenswrapper[4717]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 02 14:20:50 crc kubenswrapper[4717]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 02 14:20:50 crc kubenswrapper[4717]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 02 14:20:50 crc kubenswrapper[4717]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.366329 4717 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390633 4717 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390682 4717 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390688 4717 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390692 4717 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390698 4717 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390703 4717 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390707 4717 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390712 4717 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390718 4717 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390721 4717 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390725 4717 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390729 4717 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390733 4717 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390736 4717 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390740 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390744 4717 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390747 4717 feature_gate.go:330] unrecognized feature gate: Example Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390751 4717 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390755 4717 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390759 4717 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390764 4717 
feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390768 4717 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390771 4717 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390775 4717 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390779 4717 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390784 4717 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390787 4717 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390791 4717 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390794 4717 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390799 4717 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390803 4717 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390808 4717 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390812 4717 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390816 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390820 4717 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390823 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390826 4717 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390830 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390833 4717 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390837 4717 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390840 4717 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390843 4717 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390847 4717 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390850 4717 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390854 4717 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390858 4717 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 02 
14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390862 4717 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390866 4717 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390870 4717 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390875 4717 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390882 4717 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390887 4717 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390891 4717 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390895 4717 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390899 4717 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390904 4717 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390907 4717 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390913 4717 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390917 4717 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390921 4717 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390925 4717 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390958 4717 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390962 4717 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390966 4717 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390969 4717 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390973 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390976 4717 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390980 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390983 4717 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390986 4717 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.390990 4717 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391087 4717 flags.go:64] FLAG: --address="0.0.0.0" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391099 4717 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391107 4717 flags.go:64] FLAG: --anonymous-auth="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391112 4717 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391119 4717 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391123 4717 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391130 4717 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391136 4717 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391140 4717 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391144 4717 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391149 4717 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391156 4717 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391162 4717 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391166 4717 flags.go:64] FLAG: --cgroup-root="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 
14:20:50.391170 4717 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391250 4717 flags.go:64] FLAG: --client-ca-file="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391255 4717 flags.go:64] FLAG: --cloud-config="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391260 4717 flags.go:64] FLAG: --cloud-provider="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391264 4717 flags.go:64] FLAG: --cluster-dns="[]" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391271 4717 flags.go:64] FLAG: --cluster-domain="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391275 4717 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391279 4717 flags.go:64] FLAG: --config-dir="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391284 4717 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391289 4717 flags.go:64] FLAG: --container-log-max-files="5" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391294 4717 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391299 4717 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391303 4717 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391308 4717 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391312 4717 flags.go:64] FLAG: --contention-profiling="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391317 4717 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391321 4717 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391326 4717 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391330 4717 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391336 4717 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391341 4717 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391345 4717 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391349 4717 flags.go:64] FLAG: --enable-load-reader="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391354 4717 flags.go:64] FLAG: --enable-server="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391359 4717 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391365 4717 flags.go:64] FLAG: --event-burst="100" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391369 4717 flags.go:64] FLAG: --event-qps="50" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391373 4717 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391378 4717 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391383 4717 flags.go:64] FLAG: --eviction-hard="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391388 4717 flags.go:64] 
FLAG: --eviction-max-pod-grace-period="0" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391393 4717 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391397 4717 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391402 4717 flags.go:64] FLAG: --eviction-soft="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391406 4717 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391411 4717 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391415 4717 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391419 4717 flags.go:64] FLAG: --experimental-mounter-path="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391423 4717 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391427 4717 flags.go:64] FLAG: --fail-swap-on="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391431 4717 flags.go:64] FLAG: --feature-gates="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391437 4717 flags.go:64] FLAG: --file-check-frequency="20s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391441 4717 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391446 4717 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391450 4717 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391455 4717 flags.go:64] FLAG: --healthz-port="10248" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391460 4717 flags.go:64] FLAG: --help="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391464 4717 flags.go:64] FLAG: --hostname-override="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391470 4717 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391475 4717 flags.go:64] FLAG: --http-check-frequency="20s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391479 4717 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391484 4717 flags.go:64] FLAG: --image-credential-provider-config="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391488 4717 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391493 4717 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391497 4717 flags.go:64] FLAG: --image-service-endpoint="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391544 4717 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391552 4717 flags.go:64] FLAG: --kube-api-burst="100" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391557 4717 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391563 4717 flags.go:64] FLAG: --kube-api-qps="50" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391568 4717 flags.go:64] FLAG: --kube-reserved="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391574 4717 flags.go:64] FLAG: 
--kube-reserved-cgroup="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391579 4717 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391585 4717 flags.go:64] FLAG: --kubelet-cgroups="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391590 4717 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391596 4717 flags.go:64] FLAG: --lock-file="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391601 4717 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391606 4717 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391612 4717 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391620 4717 flags.go:64] FLAG: --log-json-split-stream="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391625 4717 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391630 4717 flags.go:64] FLAG: --log-text-split-stream="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391634 4717 flags.go:64] FLAG: --logging-format="text" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391639 4717 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391644 4717 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391649 4717 flags.go:64] FLAG: --manifest-url="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391676 4717 flags.go:64] FLAG: --manifest-url-header="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391694 4717 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391699 4717 flags.go:64] FLAG: --max-open-files="1000000" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391709 4717 flags.go:64] FLAG: --max-pods="110" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391714 4717 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391718 4717 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391723 4717 flags.go:64] FLAG: --memory-manager-policy="None" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391727 4717 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391733 4717 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391738 4717 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391742 4717 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391804 4717 flags.go:64] FLAG: --node-status-max-images="50" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391809 4717 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391815 4717 flags.go:64] FLAG: --oom-score-adj="-999" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391819 4717 flags.go:64] FLAG: --pod-cidr="" Oct 02 14:20:50 crc 
kubenswrapper[4717]: I1002 14:20:50.391823 4717 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391832 4717 flags.go:64] FLAG: --pod-manifest-path="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391836 4717 flags.go:64] FLAG: --pod-max-pids="-1" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391840 4717 flags.go:64] FLAG: --pods-per-core="0" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391845 4717 flags.go:64] FLAG: --port="10250" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391849 4717 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391854 4717 flags.go:64] FLAG: --provider-id="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391858 4717 flags.go:64] FLAG: --qos-reserved="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391862 4717 flags.go:64] FLAG: --read-only-port="10255" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391867 4717 flags.go:64] FLAG: --register-node="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391871 4717 flags.go:64] FLAG: --register-schedulable="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391875 4717 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391883 4717 flags.go:64] FLAG: --registry-burst="10" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391887 4717 flags.go:64] FLAG: --registry-qps="5" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391891 4717 flags.go:64] FLAG: --reserved-cpus="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391896 4717 flags.go:64] FLAG: --reserved-memory="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391902 4717 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391906 4717 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391910 4717 flags.go:64] FLAG: --rotate-certificates="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391915 4717 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391919 4717 flags.go:64] FLAG: --runonce="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391923 4717 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391943 4717 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391949 4717 flags.go:64] FLAG: --seccomp-default="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391953 4717 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391958 4717 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391963 4717 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391967 4717 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391972 4717 flags.go:64] FLAG: --storage-driver-password="root" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391976 4717 flags.go:64] FLAG: 
--storage-driver-secure="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391981 4717 flags.go:64] FLAG: --storage-driver-table="stats" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391985 4717 flags.go:64] FLAG: --storage-driver-user="root" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391990 4717 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391994 4717 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.391998 4717 flags.go:64] FLAG: --system-cgroups="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392003 4717 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392009 4717 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392013 4717 flags.go:64] FLAG: --tls-cert-file="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392018 4717 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392024 4717 flags.go:64] FLAG: --tls-min-version="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392028 4717 flags.go:64] FLAG: --tls-private-key-file="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392033 4717 flags.go:64] FLAG: --topology-manager-policy="none" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392037 4717 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392041 4717 flags.go:64] FLAG: --topology-manager-scope="container" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392046 4717 flags.go:64] FLAG: --v="2" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392052 4717 flags.go:64] FLAG: --version="false" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392058 4717 flags.go:64] FLAG: --vmodule="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392064 4717 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392069 4717 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392168 4717 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392173 4717 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392178 4717 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392181 4717 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392185 4717 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392189 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392192 4717 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392197 4717 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
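The flags.go:64 entries above record every command-line flag together with its effective value in the form FLAG: --name="value". A small sketch, under the same kubelet.log assumption, that turns that dump into a dictionary so individual settings can be looked up:

    import re

    # Parse 'FLAG: --name="value"' pairs from the startup flag dump.
    flags = {}
    with open("kubelet.log") as f:
        for line in f:
            for name, value in re.findall(r'FLAG: (--[\w-]+)="(.*?)"', line):
                flags[name] = value

    print(flags.get("--config"))           # /etc/kubernetes/kubelet.conf
    print(flags.get("--system-reserved"))  # cpu=200m,ephemeral-storage=350Mi,memory=350Mi
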
Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392201 4717 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392206 4717 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392210 4717 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392214 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392219 4717 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392222 4717 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392227 4717 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392231 4717 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392235 4717 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392238 4717 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392242 4717 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392246 4717 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392250 4717 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392253 4717 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392257 4717 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392260 4717 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392265 4717 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392269 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392273 4717 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392277 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392280 4717 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392284 4717 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392288 4717 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392292 4717 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392296 4717 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392299 4717 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392303 4717 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392306 4717 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392310 4717 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392313 4717 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392317 4717 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392321 4717 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392325 4717 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392328 4717 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392332 4717 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392335 4717 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392339 4717 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392342 4717 feature_gate.go:330] unrecognized feature gate: Example Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392345 4717 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392349 4717 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392353 4717 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392357 4717 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392360 4717 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 
14:20:50.392364 4717 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392367 4717 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392373 4717 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392377 4717 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392381 4717 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392384 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392388 4717 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392391 4717 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392395 4717 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392398 4717 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392403 4717 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392407 4717 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392411 4717 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392415 4717 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392419 4717 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392423 4717 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392426 4717 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392430 4717 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392433 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.392436 4717 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.392443 4717 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.400185 4717 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.400232 4717 server.go:493] "Golang 
settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400299 4717 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400307 4717 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400311 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400316 4717 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400320 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400324 4717 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400328 4717 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400331 4717 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400335 4717 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400340 4717 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400347 4717 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400351 4717 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400355 4717 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400359 4717 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400363 4717 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400366 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400370 4717 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400373 4717 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400377 4717 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400380 4717 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400384 4717 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400388 4717 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400392 4717 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400397 4717 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400401 4717 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 02 14:20:50 crc kubenswrapper[4717]: 
W1002 14:20:50.400405 4717 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400409 4717 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400412 4717 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400416 4717 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400420 4717 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400424 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400427 4717 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400431 4717 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400434 4717 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400440 4717 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400443 4717 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400447 4717 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400450 4717 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400454 4717 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400458 4717 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400462 4717 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400465 4717 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400470 4717 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400474 4717 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400478 4717 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400482 4717 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400486 4717 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400489 4717 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400493 4717 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400496 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400500 4717 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400503 4717 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400507 4717 feature_gate.go:330] unrecognized feature gate: Example Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400511 4717 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400514 4717 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400519 4717 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400524 4717 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400527 4717 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400531 4717 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400535 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400538 4717 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400542 4717 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400546 4717 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400551 4717 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400556 4717 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400560 4717 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400563 4717 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400567 4717 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400570 4717 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400574 4717 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400578 4717 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.400585 4717 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400693 4717 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400699 4717 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400703 4717 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400707 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400712 4717 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400716 4717 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400720 4717 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400725 4717 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400731 4717 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400737 4717 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400741 4717 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400745 4717 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400750 4717 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
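Most of the repeated "unrecognized feature gate" warnings appear to be cluster-level gate names that the kubelet's own gate parser does not know; only the gates it does recognize end up in the effective map printed by feature_gate.go:386. A sketch, again assuming the journal is saved as kubelet.log, for converting that Go-style map[...] summary into a Python dict:

    import re

    text = open("kubelet.log").read()
    # Use the last "feature gates: {map[...]}" summary the kubelet printed.
    raw = re.findall(r"feature gates: \{map\[(.*?)\]\}", text)[-1]
    gates = {name: value == "true" for name, value in re.findall(r"(\w+):(true|false)", raw)}
    print(gates["ValidatingAdmissionPolicy"])  # True
    print(gates["NodeSwap"])                   # False
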
Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400756 4717 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400760 4717 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400767 4717 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400771 4717 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400776 4717 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400781 4717 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400786 4717 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400790 4717 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400795 4717 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400800 4717 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400805 4717 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400810 4717 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400813 4717 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400818 4717 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400821 4717 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400826 4717 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400829 4717 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400833 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400837 4717 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400840 4717 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400844 4717 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400849 4717 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400853 4717 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400857 4717 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400861 4717 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400864 4717 
feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400869 4717 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400874 4717 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400878 4717 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400882 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400885 4717 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400889 4717 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400893 4717 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400896 4717 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400902 4717 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400905 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400909 4717 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400912 4717 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400916 4717 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400920 4717 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400924 4717 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400927 4717 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400943 4717 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400947 4717 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400952 4717 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400956 4717 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400959 4717 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400963 4717 feature_gate.go:330] unrecognized feature gate: Example Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400966 4717 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400970 4717 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400974 4717 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 02 
14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400977 4717 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400981 4717 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400984 4717 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400988 4717 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400991 4717 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400995 4717 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.400999 4717 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.401005 4717 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.401186 4717 server.go:940] "Client rotation is on, will bootstrap in background" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.421051 4717 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.421203 4717 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
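The client-certificate rotation entries that follow report the certificate expiration and a rotation deadline chosen before it; the "Waiting ..." duration is simply that deadline minus the current time. A quick check of the arithmetic with the values from this log:

    from datetime import datetime

    now = datetime(2025, 10, 2, 14, 20, 50)      # timestamp of these entries
    deadline = datetime(2026, 1, 2, 22, 52, 53)  # rotation deadline reported below
    wait = deadline - now
    print(wait)  # 92 days, 8:32:03 ~= 2216h32m3s, matching the logged wait (up to sub-second precision)
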
Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.436685 4717 server.go:997] "Starting client certificate rotation" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.436739 4717 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.437568 4717 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-02 22:52:53.915338381 +0000 UTC Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.437710 4717 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2216h32m3.477631411s for next certificate rotation Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.468073 4717 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.470268 4717 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.518923 4717 log.go:25] "Validated CRI v1 runtime API" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.697125 4717 log.go:25] "Validated CRI v1 image API" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.701355 4717 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.707276 4717 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-02-10-59-36-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.707307 4717 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:29 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}] Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.723185 4717 manager.go:217] Machine: {Timestamp:2025-10-02 14:20:50.721257357 +0000 UTC m=+1.573111823 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:f4ff76e8-93a4-4bac-8551-5d1b7e988a7f BootID:57121bc9-a96d-4ae5-b385-b930e368e855 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:29 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 
DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:03:3b:92 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:03:3b:92 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:97:5b:db Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:a3:c3:17 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:b2:38:51 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:dc:36:67 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:66:28:8a:c5:12:a6 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:02:bb:ea:a8:41:52 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 
BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.723389 4717 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.723531 4717 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.723770 4717 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.723993 4717 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.724020 4717 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.724699 4717 topology_manager.go:138] "Creating topology manager with none policy" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.724714 4717 container_manager_linux.go:303] "Creating device plugin manager" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.725127 4717 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.725145 4717 server.go:66] "Creating device plugin registration server" version="v1beta1" 
socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.725866 4717 state_mem.go:36] "Initialized new in-memory state store" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.726026 4717 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.748270 4717 kubelet.go:418] "Attempting to sync node with API server" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.748289 4717 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.748304 4717 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.748317 4717 kubelet.go:324] "Adding apiserver pod source" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.748341 4717 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.757225 4717 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.757688 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.757759 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.757746 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.757844 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.759700 4717 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.761204 4717 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762529 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762558 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762568 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762577 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762592 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762603 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762612 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762626 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762638 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762647 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762660 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762669 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.762715 4717 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.763276 4717 server.go:1280] "Started kubelet" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.763909 4717 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.764167 4717 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.764622 4717 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 02 14:20:50 crc systemd[1]: Started Kubernetes Kubelet. 
Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.767919 4717 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.769023 4717 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.769105 4717 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.769202 4717 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 14:49:23.086329095 +0000 UTC Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.769275 4717 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2328h28m32.317058938s for next certificate rotation Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.769545 4717 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.769809 4717 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.769896 4717 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.769972 4717 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.770324 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.770337 4717 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="200ms" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.770509 4717 factory.go:55] Registering systemd factory Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.770569 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.770589 4717 factory.go:221] Registration of the systemd container factory successfully Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.783380 4717 factory.go:153] Registering CRI-O factory Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.783410 4717 factory.go:221] Registration of the crio container factory successfully Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.783493 4717 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.783515 4717 factory.go:103] 
Registering Raw factory Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.783528 4717 manager.go:1196] Started watching for new ooms in manager Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.783853 4717 server.go:460] "Adding debug handlers to kubelet server" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.784134 4717 manager.go:319] Starting recovery of all containers Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.785847 4717 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.110:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186ab27c284a2756 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-02 14:20:50.763237206 +0000 UTC m=+1.615091652,LastTimestamp:2025-10-02 14:20:50.763237206 +0000 UTC m=+1.615091652,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789737 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789799 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789821 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789837 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789854 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789870 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789886 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789903 4717 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789926 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.789988 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790006 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790022 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790037 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790056 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790072 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790090 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790109 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790125 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790142 4717 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790158 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790173 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790188 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790203 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790218 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790235 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790249 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790270 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790287 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790305 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790324 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790341 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790360 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790375 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790390 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790406 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790421 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790436 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790451 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790466 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790481 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790497 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790512 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790528 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790543 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790558 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790573 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790596 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790614 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790628 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790644 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790665 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790680 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790700 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790720 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790738 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790753 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790769 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790784 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790800 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790815 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790830 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790847 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790863 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790916 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790958 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.790987 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791007 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791028 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791045 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791063 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791081 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791097 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791113 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791130 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791146 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791161 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791178 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791193 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791209 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791231 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791247 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791263 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791279 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791296 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791313 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791329 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791347 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791362 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791377 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791392 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791408 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791424 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791439 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791455 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791471 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791488 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791503 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791518 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791534 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791550 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791566 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791581 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791596 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791613 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791633 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791652 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791666 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791679 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791690 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791703 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791715 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791735 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791748 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791822 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791837 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791849 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791861 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.791873 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.801309 4717 manager.go:324] Recovery completed Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802119 4717 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802175 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802206 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802222 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802238 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802260 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802280 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802303 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802405 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802479 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802495 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802517 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802541 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802583 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802601 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802611 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802628 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802639 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802649 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802667 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802680 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802694 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.802756 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803097 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803123 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803140 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803527 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803558 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803575 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803589 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803603 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803616 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803628 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803642 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803655 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803667 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803681 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803696 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803709 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803721 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803733 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803748 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803761 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803773 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803786 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803798 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803810 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803823 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803839 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803855 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803866 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803878 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803890 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803904 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803918 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803948 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803973 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.803989 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804001 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804013 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804029 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804043 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804055 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804068 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804082 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804095 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804108 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804121 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804134 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804146 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804161 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804173 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804186 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804199 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804212 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804285 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804301 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804313 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804327 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804340 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804353 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804365 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804378 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804391 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804404 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804421 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804435 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804450 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804462 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804475 4717 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804487 4717 reconstruct.go:97] "Volume reconstruction finished" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.804496 4717 reconciler.go:26] "Reconciler: start to sync state" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.810496 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.811922 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.811981 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.811991 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.814396 4717 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.814416 4717 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.814478 4717 state_mem.go:36] "Initialized new in-memory state store" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.835981 4717 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.837538 4717 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.837572 4717 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.837598 4717 kubelet.go:2335] "Starting kubelet main sync loop" Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.837667 4717 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 02 14:20:50 crc kubenswrapper[4717]: W1002 14:20:50.838197 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.838246 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.869989 4717 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.938786 4717 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.968829 4717 policy_none.go:49] "None policy: Start" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.969699 4717 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 02 14:20:50 crc kubenswrapper[4717]: I1002 14:20:50.969723 4717 state_mem.go:35] "Initializing new in-memory state store" Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.970966 4717 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 02 14:20:50 crc kubenswrapper[4717]: E1002 14:20:50.971234 4717 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="400ms" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.025044 4717 manager.go:334] "Starting Device Plugin manager" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.025114 4717 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.025126 4717 server.go:79] "Starting device plugin registration server" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.025495 4717 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.025514 4717 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.025620 4717 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.025703 4717 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 
14:20:51.025710 4717 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 02 14:20:51 crc kubenswrapper[4717]: E1002 14:20:51.038605 4717 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.126506 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.127539 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.127575 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.127586 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.127613 4717 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 14:20:51 crc kubenswrapper[4717]: E1002 14:20:51.128056 4717 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.139290 4717 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.139400 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.140540 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.140561 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.140571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.140707 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.140813 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.140845 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141449 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141493 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141508 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141534 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141587 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141762 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141844 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.141873 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143167 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143177 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143235 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143252 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143196 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143315 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143394 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143418 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.143441 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144207 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144232 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144243 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144267 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144286 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144295 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144403 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144550 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.144581 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.145834 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.145856 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.145864 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.146373 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.146390 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.146400 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.146512 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.146529 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.147592 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.147687 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.147707 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208338 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208379 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208421 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208442 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208459 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208474 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208489 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208507 4717 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208596 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208646 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208668 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208695 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208715 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208753 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.208787 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310414 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310532 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310597 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310534 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310711 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310755 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310788 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310818 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310850 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310852 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310875 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310883 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310902 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310918 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310968 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310995 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310947 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311006 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.310961 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311047 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311065 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311078 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311083 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311119 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311120 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311135 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311144 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311180 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311342 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.311212 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.328972 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.330126 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.330179 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.330193 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.330225 4717 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 14:20:51 crc kubenswrapper[4717]: E1002 14:20:51.330651 4717 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 02 14:20:51 crc kubenswrapper[4717]: E1002 14:20:51.372075 4717 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="800ms" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.473115 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.479200 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.496476 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.510573 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.512373 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:20:51 crc kubenswrapper[4717]: W1002 14:20:51.554975 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-76d6265b4b75c88c470183c7a386c9a0125049d76244dbd6aa2c481f94c3a215 WatchSource:0}: Error finding container 76d6265b4b75c88c470183c7a386c9a0125049d76244dbd6aa2c481f94c3a215: Status 404 returned error can't find the container with id 76d6265b4b75c88c470183c7a386c9a0125049d76244dbd6aa2c481f94c3a215 Oct 02 14:20:51 crc kubenswrapper[4717]: W1002 14:20:51.555796 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-bb39bfc8d8aa96637647beda877a265f12a528f145717eb81332a8b5fa0c9b53 WatchSource:0}: Error finding container bb39bfc8d8aa96637647beda877a265f12a528f145717eb81332a8b5fa0c9b53: Status 404 returned error can't find the container with id bb39bfc8d8aa96637647beda877a265f12a528f145717eb81332a8b5fa0c9b53 Oct 02 14:20:51 crc kubenswrapper[4717]: W1002 14:20:51.557107 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-b4fb235914f5093bff61b2f66495ed3b0ff629d0b5a86d93d26a3a6e6d3e2248 WatchSource:0}: Error finding container b4fb235914f5093bff61b2f66495ed3b0ff629d0b5a86d93d26a3a6e6d3e2248: Status 404 returned error can't find the container with id b4fb235914f5093bff61b2f66495ed3b0ff629d0b5a86d93d26a3a6e6d3e2248 Oct 02 14:20:51 crc kubenswrapper[4717]: W1002 14:20:51.558358 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-66390b864a1d018ddbc977c2eaa310e4eafa8ebb26111c7fac94ec89e259787a WatchSource:0}: Error finding container 66390b864a1d018ddbc977c2eaa310e4eafa8ebb26111c7fac94ec89e259787a: Status 404 returned error can't find the container with id 66390b864a1d018ddbc977c2eaa310e4eafa8ebb26111c7fac94ec89e259787a Oct 02 14:20:51 crc kubenswrapper[4717]: W1002 14:20:51.559268 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-9fec7f473d09d88eb989e4475a24b94812b52cba365c5758e045d6afbdff66fe WatchSource:0}: Error finding container 9fec7f473d09d88eb989e4475a24b94812b52cba365c5758e045d6afbdff66fe: Status 404 returned error can't find the container with id 9fec7f473d09d88eb989e4475a24b94812b52cba365c5758e045d6afbdff66fe Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.731437 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.733511 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.733554 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.733566 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.733596 4717 kubelet_node_status.go:76] "Attempting to 
register node" node="crc" Oct 02 14:20:51 crc kubenswrapper[4717]: E1002 14:20:51.734144 4717 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.769449 4717 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:51 crc kubenswrapper[4717]: W1002 14:20:51.831104 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:51 crc kubenswrapper[4717]: E1002 14:20:51.831195 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.842052 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9fec7f473d09d88eb989e4475a24b94812b52cba365c5758e045d6afbdff66fe"} Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.842889 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b4fb235914f5093bff61b2f66495ed3b0ff629d0b5a86d93d26a3a6e6d3e2248"} Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.843784 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"76d6265b4b75c88c470183c7a386c9a0125049d76244dbd6aa2c481f94c3a215"} Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.845447 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bb39bfc8d8aa96637647beda877a265f12a528f145717eb81332a8b5fa0c9b53"} Oct 02 14:20:51 crc kubenswrapper[4717]: I1002 14:20:51.846384 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"66390b864a1d018ddbc977c2eaa310e4eafa8ebb26111c7fac94ec89e259787a"} Oct 02 14:20:52 crc kubenswrapper[4717]: W1002 14:20:52.067270 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:52 crc kubenswrapper[4717]: E1002 14:20:52.067366 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get 
\"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:52 crc kubenswrapper[4717]: W1002 14:20:52.077394 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:52 crc kubenswrapper[4717]: E1002 14:20:52.077491 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:52 crc kubenswrapper[4717]: E1002 14:20:52.173030 4717 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="1.6s" Oct 02 14:20:52 crc kubenswrapper[4717]: W1002 14:20:52.357457 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:52 crc kubenswrapper[4717]: E1002 14:20:52.357552 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:52 crc kubenswrapper[4717]: I1002 14:20:52.535155 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:52 crc kubenswrapper[4717]: I1002 14:20:52.536716 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:52 crc kubenswrapper[4717]: I1002 14:20:52.536777 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:52 crc kubenswrapper[4717]: I1002 14:20:52.536800 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:52 crc kubenswrapper[4717]: I1002 14:20:52.536841 4717 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 14:20:52 crc kubenswrapper[4717]: E1002 14:20:52.537509 4717 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 02 14:20:52 crc kubenswrapper[4717]: I1002 14:20:52.769888 4717 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.769431 4717 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:53 crc kubenswrapper[4717]: E1002 14:20:53.774027 4717 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="3.2s" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.852996 4717 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e" exitCode=0 Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.853532 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.857024 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e"} Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.858761 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.858795 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.858805 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.859865 4717 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e" exitCode=0 Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.859954 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e"} Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.860067 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.861050 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.861099 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.861112 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.862617 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.863514 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.863538 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.863549 
4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.866074 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.866077 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea"} Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.866135 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f"} Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.866149 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def"} Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.866160 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6"} Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.866866 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.866892 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.866900 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.868216 4717 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ca7d369bb4c491d5b7f6d735b6b773018ed384a04b07fd94441a278e6424c8ab" exitCode=0 Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.868342 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.868645 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ca7d369bb4c491d5b7f6d735b6b773018ed384a04b07fd94441a278e6424c8ab"} Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.868973 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.869017 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.869025 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.871564 4717 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" 
containerID="f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6" exitCode=0 Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.871613 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6"} Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.871721 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.875847 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.875883 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:53 crc kubenswrapper[4717]: I1002 14:20:53.875894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:54 crc kubenswrapper[4717]: W1002 14:20:54.089644 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:54 crc kubenswrapper[4717]: E1002 14:20:54.089714 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.138224 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.139316 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.139350 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.139362 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.139499 4717 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 14:20:54 crc kubenswrapper[4717]: E1002 14:20:54.139902 4717 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Oct 02 14:20:54 crc kubenswrapper[4717]: W1002 14:20:54.168825 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:54 crc kubenswrapper[4717]: E1002 14:20:54.168911 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get 
\"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.768631 4717 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.879726 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.879770 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.879780 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.879789 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.879798 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.880249 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.881014 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.881033 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.881041 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.882279 4717 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="15e8ce9b85b40ee7b7674de8885dc2a03b69f2d1e19d5a0a308383a794bcadd0" exitCode=0 Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.882332 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"15e8ce9b85b40ee7b7674de8885dc2a03b69f2d1e19d5a0a308383a794bcadd0"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.882416 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.883055 4717 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.883072 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.883082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.883983 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.884078 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.885047 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.885084 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.885096 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.893462 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.893501 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.893523 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.893522 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.893635 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37"} Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.894696 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.894698 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.894747 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.894758 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.894721 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:54 crc kubenswrapper[4717]: I1002 14:20:54.894837 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:54 crc kubenswrapper[4717]: W1002 14:20:54.957288 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:54 crc kubenswrapper[4717]: E1002 14:20:54.957382 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:55 crc kubenswrapper[4717]: W1002 14:20:55.015145 4717 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Oct 02 14:20:55 crc kubenswrapper[4717]: E1002 14:20:55.015214 4717 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.897981 4717 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9d7be6f7f2c6342d250a3066c8e3f7e2be535a94ea71a20af8280ba3dc32ec6a" exitCode=0 Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.898380 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.898392 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.898057 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9d7be6f7f2c6342d250a3066c8e3f7e2be535a94ea71a20af8280ba3dc32ec6a"} Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.898426 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.898414 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.898446 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.898100 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.899769 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.899798 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:55 crc 
kubenswrapper[4717]: I1002 14:20:55.899806 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.899806 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.899829 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.899837 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.899853 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.899877 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.899891 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.900704 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.900727 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:55 crc kubenswrapper[4717]: I1002 14:20:55.900735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:56 crc kubenswrapper[4717]: I1002 14:20:56.904716 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"926d64376986616fc42ae2650275b15958fb83c24caae6c7250d9c7dbeac083e"} Oct 02 14:20:56 crc kubenswrapper[4717]: I1002 14:20:56.904757 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5b4d04e8fd5034048590a99d3d3dc3e0b29330ea80071f41b6d18425d1a19745"} Oct 02 14:20:56 crc kubenswrapper[4717]: I1002 14:20:56.904767 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"941dc315a1b135bd2da99bd34a3b45858d89b0336f36ee78aa861ee366238c1f"} Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.340620 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.342270 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.342301 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.342309 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.342332 4717 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.914126 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"07a618f871e65aada2ce1149004e1d42797353d2d2c6e6b70599a72430263455"} Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.914170 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8e7d47d36c4c31c6182bbf2690d314f4fc3b00ea446d6621a114dced1f169326"} Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.914368 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.915815 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.915872 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.915884 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.925575 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.925702 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.925737 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.926723 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.926757 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:57 crc kubenswrapper[4717]: I1002 14:20:57.926769 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.197718 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.905459 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.905658 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.906833 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.906867 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.906876 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.915790 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.917066 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:58 crc 
kubenswrapper[4717]: I1002 14:20:58.917143 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.917157 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.948344 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.948529 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.948577 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.949726 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.949760 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:58 crc kubenswrapper[4717]: I1002 14:20:58.949772 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.164005 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.164176 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.165436 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.165473 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.165485 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.751147 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.917398 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.917424 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.918656 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.918687 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.918696 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.918748 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.918768 4717 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Oct 02 14:20:59 crc kubenswrapper[4717]: I1002 14:20:59.918781 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:00 crc kubenswrapper[4717]: I1002 14:21:00.863102 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:21:00 crc kubenswrapper[4717]: I1002 14:21:00.863339 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:00 crc kubenswrapper[4717]: I1002 14:21:00.864726 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:00 crc kubenswrapper[4717]: I1002 14:21:00.864780 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:00 crc kubenswrapper[4717]: I1002 14:21:00.864790 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:01 crc kubenswrapper[4717]: E1002 14:21:01.038706 4717 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 02 14:21:02 crc kubenswrapper[4717]: I1002 14:21:02.165100 4717 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 02 14:21:02 crc kubenswrapper[4717]: I1002 14:21:02.165183 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 02 14:21:02 crc kubenswrapper[4717]: I1002 14:21:02.611802 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:21:02 crc kubenswrapper[4717]: I1002 14:21:02.612057 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:02 crc kubenswrapper[4717]: I1002 14:21:02.613463 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:02 crc kubenswrapper[4717]: I1002 14:21:02.613506 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:02 crc kubenswrapper[4717]: I1002 14:21:02.613515 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.284396 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.284582 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.286650 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:03 crc 
kubenswrapper[4717]: I1002 14:21:03.286698 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.286717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.290170 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.926314 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.927538 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.927584 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.927594 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:03 crc kubenswrapper[4717]: I1002 14:21:03.955099 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:21:04 crc kubenswrapper[4717]: I1002 14:21:04.928481 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:04 crc kubenswrapper[4717]: I1002 14:21:04.929210 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:04 crc kubenswrapper[4717]: I1002 14:21:04.929243 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:04 crc kubenswrapper[4717]: I1002 14:21:04.929251 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:05 crc kubenswrapper[4717]: E1002 14:21:05.520810 4717 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.186ab27c284a2756 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-02 14:20:50.763237206 +0000 UTC m=+1.615091652,LastTimestamp:2025-10-02 14:20:50.763237206 +0000 UTC m=+1.615091652,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 02 14:21:05 crc kubenswrapper[4717]: I1002 14:21:05.770913 4717 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 02 14:21:06 crc kubenswrapper[4717]: I1002 14:21:06.393232 4717 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" 
start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 02 14:21:06 crc kubenswrapper[4717]: I1002 14:21:06.393324 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 02 14:21:06 crc kubenswrapper[4717]: I1002 14:21:06.397479 4717 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 02 14:21:06 crc kubenswrapper[4717]: I1002 14:21:06.397534 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.076763 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.076903 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.078033 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.078073 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.078085 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.121958 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.934229 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.934397 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.934422 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.935348 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.935379 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.935391 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.935399 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:07 crc kubenswrapper[4717]: 
I1002 14:21:07.935432 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.935453 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.938887 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:21:07 crc kubenswrapper[4717]: I1002 14:21:07.946500 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 02 14:21:08 crc kubenswrapper[4717]: I1002 14:21:08.936634 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:08 crc kubenswrapper[4717]: I1002 14:21:08.936681 4717 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 02 14:21:08 crc kubenswrapper[4717]: I1002 14:21:08.938050 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:08 crc kubenswrapper[4717]: I1002 14:21:08.938106 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:08 crc kubenswrapper[4717]: I1002 14:21:08.938130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:08 crc kubenswrapper[4717]: I1002 14:21:08.938363 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:08 crc kubenswrapper[4717]: I1002 14:21:08.938398 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:08 crc kubenswrapper[4717]: I1002 14:21:08.938414 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.038842 4717 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.391369 4717 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.392132 4717 trace.go:236] Trace[1059585340]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Oct-2025 14:20:59.521) (total time: 11870ms): Oct 02 14:21:11 crc kubenswrapper[4717]: Trace[1059585340]: ---"Objects listed" error: 11870ms (14:21:11.392) Oct 02 14:21:11 crc kubenswrapper[4717]: Trace[1059585340]: [11.870318731s] [11.870318731s] END Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.392161 4717 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.393243 4717 trace.go:236] Trace[1328962657]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Oct-2025 14:21:00.366) (total time: 11026ms): Oct 02 14:21:11 crc kubenswrapper[4717]: Trace[1328962657]: ---"Objects listed" error: 11026ms (14:21:11.393) Oct 02 14:21:11 crc kubenswrapper[4717]: Trace[1328962657]: [11.026773696s] [11.026773696s] END Oct 02 14:21:11 
crc kubenswrapper[4717]: I1002 14:21:11.393311 4717 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.393739 4717 trace.go:236] Trace[444328354]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Oct-2025 14:20:58.669) (total time: 12724ms): Oct 02 14:21:11 crc kubenswrapper[4717]: Trace[444328354]: ---"Objects listed" error: 12724ms (14:21:11.393) Oct 02 14:21:11 crc kubenswrapper[4717]: Trace[444328354]: [12.724277404s] [12.724277404s] END Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.393760 4717 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.393748 4717 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.395375 4717 trace.go:236] Trace[586848272]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Oct-2025 14:20:59.438) (total time: 11956ms): Oct 02 14:21:11 crc kubenswrapper[4717]: Trace[586848272]: ---"Objects listed" error: 11956ms (14:21:11.395) Oct 02 14:21:11 crc kubenswrapper[4717]: Trace[586848272]: [11.956999292s] [11.956999292s] END Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.395510 4717 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.400254 4717 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.400414 4717 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.401967 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.402037 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.402058 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.402094 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.402113 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.409699 4717 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38026->192.168.126.11:17697: read: connection reset by peer" start-of-body= Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.409781 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38026->192.168.126.11:17697: read: connection reset by peer" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.410184 4717 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38034->192.168.126.11:17697: read: connection reset by peer" start-of-body= Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.410290 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38034->192.168.126.11:17697: read: connection reset by peer" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.410693 4717 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.410728 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.411323 4717 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.411373 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.421126 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8
bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"
names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.424532 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.424583 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.424595 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.424620 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.424635 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.433499 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.439416 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.439467 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 
14:21:11.439481 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.439505 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.439521 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.450089 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.453673 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.453709 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 
14:21:11.453719 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.453738 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.453749 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.464188 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.467686 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.467721 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 
14:21:11.467730 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.467747 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.467789 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.475714 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.475843 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.477470 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 
14:21:11.477517 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.477530 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.477556 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.477570 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.579520 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.579561 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.579569 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.579586 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.579595 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.681741 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.681783 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.681792 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.681810 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.681819 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.762328 4717 apiserver.go:52] "Watching apiserver" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.765957 4717 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.766272 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-5ljkq","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.766668 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.766735 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.766835 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.767074 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.767147 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.767563 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-5ljkq" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.767581 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.767653 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.767694 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.767729 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.768635 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.768693 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.768636 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.768729 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.768856 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.768947 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.768994 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.770140 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.770616 4717 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.771232 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.771496 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.771514 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.772669 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.783811 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.783841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.783849 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.783882 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.783895 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.787327 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795356 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795400 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795424 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795443 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795460 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795477 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795493 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 02 14:21:11 crc 
kubenswrapper[4717]: I1002 14:21:11.795515 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795530 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795546 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795560 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795575 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795591 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795606 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795621 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795620 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795636 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795652 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795667 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795684 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795699 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795713 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795727 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795741 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795756 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795770 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795784 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795798 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795813 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795827 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795863 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795889 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795910 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795969 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.795993 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796011 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796036 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796055 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796077 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796097 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796118 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796145 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796168 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796168 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796191 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796210 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796232 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796253 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796277 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796295 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796298 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796334 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796352 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796367 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796383 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796398 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796414 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796430 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796447 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796462 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") 
pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796462 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796479 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796532 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796550 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796564 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796579 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796596 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796613 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796630 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796646 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796662 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796677 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796692 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796707 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796721 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796737 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796760 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796776 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796792 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796868 4717 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796886 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797023 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797054 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797073 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797088 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797102 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797117 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797132 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797148 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 02 14:21:11 crc 
kubenswrapper[4717]: I1002 14:21:11.797163 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797177 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797192 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797207 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797223 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797240 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797256 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797270 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797286 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797301 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797315 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797329 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797345 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797361 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797376 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797391 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797406 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797420 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797434 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797594 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod 
\"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797611 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797712 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797756 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797795 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797819 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797843 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797866 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797888 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797909 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797946 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797970 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798010 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798050 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798072 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798099 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798120 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798142 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798162 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798185 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798204 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798225 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798250 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798271 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798292 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798315 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798338 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798359 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798382 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798411 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798434 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798460 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798481 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798503 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798525 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798546 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798565 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798581 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798596 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798612 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798632 4717 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798648 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798664 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798684 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798707 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798731 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798758 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798782 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798804 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798827 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: 
\"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798846 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798867 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798891 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798914 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798953 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798975 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799013 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799040 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799120 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799314 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799358 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799384 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799408 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799433 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799454 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799477 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799502 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799524 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799551 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799578 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" 
(UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799604 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799626 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799648 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799672 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799708 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799734 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799757 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799778 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799799 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799825 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799847 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799870 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799892 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799915 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799991 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800018 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800040 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800064 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800094 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800120 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800146 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800410 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800757 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdv45\" (UniqueName: \"kubernetes.io/projected/328e8bee-9892-4374-8985-28ac6cb2d377-kube-api-access-qdv45\") pod \"node-resolver-5ljkq\" (UID: \"328e8bee-9892-4374-8985-28ac6cb2d377\") " pod="openshift-dns/node-resolver-5ljkq" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800835 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800870 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800896 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.803434 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.803736 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 
14:21:11.803777 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.803810 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.803844 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.803879 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.803911 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804000 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804032 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804063 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/328e8bee-9892-4374-8985-28ac6cb2d377-hosts-file\") pod \"node-resolver-5ljkq\" (UID: \"328e8bee-9892-4374-8985-28ac6cb2d377\") " pod="openshift-dns/node-resolver-5ljkq" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804097 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod 
\"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804150 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804171 4717 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804186 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804201 4717 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.804998 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.805014 4717 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.805417 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.805549 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796612 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796732 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796846 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.796882 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797117 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797420 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797485 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797682 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797690 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797696 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797834 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.809869 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.810017 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.810025 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.810132 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797886 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797977 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.797992 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798362 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798372 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798417 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798676 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798710 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.798975 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799059 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.810539 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799248 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799482 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799574 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799647 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.799807 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800056 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800068 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800341 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800619 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.800977 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.802122 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.802198 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.802183 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.802233 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.802379 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.802685 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.802791 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.802825 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.803073 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.804509 4717 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.807598 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.807605 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.807817 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.807822 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.807841 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.810773 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.808196 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.808214 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.808309 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.808354 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.810801 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.808645 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.808696 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.809076 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.809113 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.809306 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.809378 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.811051 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.810882 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.811162 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.811401 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.811710 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.811839 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.811945 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812000 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812177 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812212 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812303 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812303 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812409 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812532 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812633 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812881 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.821389 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.821512 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.821555 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.823449 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.828775 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.828795 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.829113 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.829321 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.829518 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.829534 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.829671 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.829901 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.830021 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.830184 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.830672 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831053 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831143 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831238 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831413 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831317 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831457 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831708 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831671 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831743 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831786 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.831843 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832027 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832284 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832386 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832605 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832655 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832832 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832854 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832903 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.832961 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.833078 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.833267 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.833250 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.833397 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.833469 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.833865 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.833913 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.834112 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.834394 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.834405 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.834439 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.834554 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.834803 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.835027 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.835052 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.835083 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.835124 4717 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.835375 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.835580 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836050 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836118 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836238 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836290 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836476 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836495 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836569 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836836 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.836903 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.837275 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.837298 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:12.337270899 +0000 UTC m=+23.189125425 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.837331 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:12.337322051 +0000 UTC m=+23.189176597 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.837544 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.837824 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.838246 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.838309 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.838386 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.838492 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.838517 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.838531 4717 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.838590 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:12.338570384 +0000 UTC m=+23.190424910 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.838669 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.838811 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.838981 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.839164 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.839327 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:21:12.339302704 +0000 UTC m=+23.191157190 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.839610 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.840115 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.840146 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.840431 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.840798 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.840888 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.840913 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.840960 4717 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.840983 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: E1002 14:21:11.841011 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:12.340991919 +0000 UTC m=+23.192846365 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.841374 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.841480 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.841519 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-sk55f"] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.842047 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-s7n7q"] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.842069 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.843524 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l7nn7"] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.843556 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.844255 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.844597 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.841704 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.841765 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.842804 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.843273 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.844574 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.845141 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.845279 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.846180 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-kwkj8"] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.846397 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.846960 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.846180 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.846967 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.847069 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.847115 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.847409 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.847103 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.847163 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.847791 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.847892 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.848372 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.848684 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.849475 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.850500 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.851096 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.851187 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.851223 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.851264 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.851473 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.851667 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.851703 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.851828 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852013 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852084 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852117 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852137 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852163 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852232 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852395 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852533 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852687 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852708 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852775 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.812199 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852871 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.809488 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.852410 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.853182 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.853323 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.853452 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.853590 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.853752 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.853968 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.854918 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.854961 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.859466 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.863058 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.865006 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.865248 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.879476 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.881218 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.899142 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908588 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cni-binary-copy\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908626 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-ovn\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908642 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-cnibin\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908659 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cnibin\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908678 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-os-release\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908694 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-env-overrides\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908708 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-system-cni-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908722 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-cni-bin\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908736 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/405aba30-0ff3-4fca-a5da-09c35263665d-proxy-tls\") pod \"machine-config-daemon-sk55f\" 
(UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908799 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908815 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-script-lib\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908829 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/424c679b-8db0-4ba4-9c8f-67a65fe38048-cni-binary-copy\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908842 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-socket-dir-parent\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908857 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-hostroot\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908885 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908900 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-node-log\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908915 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-log-socket\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908943 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/4de64e15-550a-4404-92fc-b355535a4bf2-ovn-node-metrics-cert\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908980 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-systemd\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.908995 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-netd\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909013 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-multus-certs\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909034 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns4hg\" (UniqueName: \"kubernetes.io/projected/e1d9164c-6127-4f40-ae97-942e6cd0faf2-kube-api-access-ns4hg\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909048 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-os-release\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909064 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-daemon-config\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909081 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-etc-kubernetes\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909110 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdv45\" (UniqueName: \"kubernetes.io/projected/328e8bee-9892-4374-8985-28ac6cb2d377-kube-api-access-qdv45\") pod \"node-resolver-5ljkq\" (UID: \"328e8bee-9892-4374-8985-28ac6cb2d377\") " pod="openshift-dns/node-resolver-5ljkq" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909143 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/405aba30-0ff3-4fca-a5da-09c35263665d-mcd-auth-proxy-config\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909157 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-bin\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909171 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w94qv\" (UniqueName: \"kubernetes.io/projected/4de64e15-550a-4404-92fc-b355535a4bf2-kube-api-access-w94qv\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909191 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-cni-multus\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909204 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-kubelet\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909218 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v64t\" (UniqueName: \"kubernetes.io/projected/405aba30-0ff3-4fca-a5da-09c35263665d-kube-api-access-7v64t\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909232 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-etc-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909245 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-config\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909275 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/405aba30-0ff3-4fca-a5da-09c35263665d-rootfs\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " 
pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909289 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-cni-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909339 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909357 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-netns\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909371 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-k8s-cni-cncf-io\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909386 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909403 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909417 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/328e8bee-9892-4374-8985-28ac6cb2d377-hosts-file\") pod \"node-resolver-5ljkq\" (UID: \"328e8bee-9892-4374-8985-28ac6cb2d377\") " pod="openshift-dns/node-resolver-5ljkq" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909434 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-system-cni-dir\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909448 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-systemd-units\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909464 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-conf-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909491 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-kubelet\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909506 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-slash\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909521 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909535 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-ovn-kubernetes\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909549 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-netns\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909563 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-var-lib-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909577 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d88cl\" (UniqueName: \"kubernetes.io/projected/424c679b-8db0-4ba4-9c8f-67a65fe38048-kube-api-access-d88cl\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909617 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" 
(UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909629 4717 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909638 4717 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909648 4717 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909661 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909671 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909681 4717 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909689 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909698 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909706 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909714 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909723 4717 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909732 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909757 
4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909767 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909776 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909784 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909792 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909830 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909838 4717 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909846 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909856 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909865 4717 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909873 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909882 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909892 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 
14:21:11.909901 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909910 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909919 4717 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909942 4717 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909952 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909961 4717 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909970 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909980 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.909990 4717 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910000 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910012 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910023 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910031 4717 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910039 4717 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910047 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910055 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910063 4717 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910071 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910079 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910087 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910095 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910103 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910111 4717 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910119 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910127 4717 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910136 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910144 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910152 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910160 4717 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910169 4717 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910177 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910186 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910194 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910202 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910211 4717 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910219 4717 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910228 4717 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910236 4717 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910244 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910262 4717 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910270 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910279 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910288 4717 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910297 4717 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910306 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910314 4717 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910322 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910330 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910338 4717 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910346 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910355 4717 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910363 4717 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910371 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910379 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910387 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910396 4717 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910404 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910695 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910710 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910721 4717 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910729 4717 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910737 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910758 4717 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910766 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910774 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910783 4717 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910791 4717 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910799 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910808 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910816 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910823 4717 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910831 4717 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910839 4717 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910847 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910856 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910864 4717 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910872 4717 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910883 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910892 4717 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910900 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910909 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910917 4717 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910925 4717 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910947 4717 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910957 4717 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910965 4717 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910973 4717 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910982 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910990 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.910998 4717 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911007 4717 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911015 4717 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node 
\"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911023 4717 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911032 4717 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911040 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911048 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911057 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911066 4717 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911074 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911082 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911138 4717 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911150 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911158 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911166 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911174 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911182 4717 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911191 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911201 4717 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911210 4717 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911219 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911228 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911236 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911244 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911252 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911260 4717 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911268 4717 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911278 4717 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911286 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" 
(UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911294 4717 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911303 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911311 4717 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911319 4717 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911327 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911335 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911343 4717 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911351 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911360 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911368 4717 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911376 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911384 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911394 4717 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911402 4717 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911411 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911419 4717 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911427 4717 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911435 4717 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911443 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911450 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911459 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911469 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911478 4717 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911486 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911495 4717 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911502 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: 
\"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911511 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911519 4717 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911527 4717 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911535 4717 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911544 4717 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911553 4717 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911564 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911573 4717 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911582 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911590 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911599 4717 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911607 4717 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911615 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911623 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911632 4717 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911640 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911649 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911663 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.911671 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.912243 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.912265 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.912272 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.912285 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.912294 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:11Z","lastTransitionTime":"2025-10-02T14:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.913032 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.913656 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/328e8bee-9892-4374-8985-28ac6cb2d377-hosts-file\") pod \"node-resolver-5ljkq\" (UID: \"328e8bee-9892-4374-8985-28ac6cb2d377\") " pod="openshift-dns/node-resolver-5ljkq" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.913821 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.916646 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.927917 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.931857 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdv45\" (UniqueName: \"kubernetes.io/projected/328e8bee-9892-4374-8985-28ac6cb2d377-kube-api-access-qdv45\") pod \"node-resolver-5ljkq\" (UID: \"328e8bee-9892-4374-8985-28ac6cb2d377\") " pod="openshift-dns/node-resolver-5ljkq" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.941447 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.946256 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.946625 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.947732 4717 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad" exitCode=255 Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.947791 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad"} Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.950576 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.953545 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"82595e7e58f40d20562927048543b09032bde8369e4966a2019749a08e841ab1"} Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.953794 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.960675 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.969577 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.971271 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.971627 4717 scope.go:117] "RemoveContainer" containerID="888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.973558 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.981088 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:11 crc kubenswrapper[4717]: I1002 14:21:11.996867 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.007384 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.012199 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-hostroot\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.012336 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/405aba30-0ff3-4fca-a5da-09c35263665d-proxy-tls\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.012415 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.012342 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-hostroot\") pod \"multus-s7n7q\" (UID: 
\"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.012590 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.012495 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-script-lib\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.012730 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/424c679b-8db0-4ba4-9c8f-67a65fe38048-cni-binary-copy\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013464 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/424c679b-8db0-4ba4-9c8f-67a65fe38048-cni-binary-copy\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.012759 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-socket-dir-parent\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013539 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4de64e15-550a-4404-92fc-b355535a4bf2-ovn-node-metrics-cert\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013562 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-node-log\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013611 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-log-socket\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013629 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-systemd\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc 
kubenswrapper[4717]: I1002 14:21:12.013646 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-netd\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013662 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-multus-certs\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013677 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-daemon-config\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013691 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-etc-kubernetes\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013720 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns4hg\" (UniqueName: \"kubernetes.io/projected/e1d9164c-6127-4f40-ae97-942e6cd0faf2-kube-api-access-ns4hg\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013737 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-os-release\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013756 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-bin\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013777 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w94qv\" (UniqueName: \"kubernetes.io/projected/4de64e15-550a-4404-92fc-b355535a4bf2-kube-api-access-w94qv\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013793 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-cni-multus\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013812 4717 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/405aba30-0ff3-4fca-a5da-09c35263665d-mcd-auth-proxy-config\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013829 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-kubelet\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013844 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-etc-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013859 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-config\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013882 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v64t\" (UniqueName: \"kubernetes.io/projected/405aba30-0ff3-4fca-a5da-09c35263665d-kube-api-access-7v64t\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013905 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/405aba30-0ff3-4fca-a5da-09c35263665d-rootfs\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013921 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-cni-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013955 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-netns\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013971 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-k8s-cni-cncf-io\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.013998 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" 
(UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014015 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014048 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-system-cni-dir\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014063 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-systemd-units\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014079 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-conf-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014095 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-slash\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014115 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014133 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-ovn-kubernetes\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014159 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-kubelet\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014175 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-netns\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014190 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-var-lib-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014206 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d88cl\" (UniqueName: \"kubernetes.io/projected/424c679b-8db0-4ba4-9c8f-67a65fe38048-kube-api-access-d88cl\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014221 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cni-binary-copy\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014235 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-ovn\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014255 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-cnibin\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014272 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-cni-bin\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014288 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cnibin\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014302 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-os-release\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014316 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-env-overrides\") pod \"ovnkube-node-l7nn7\" (UID: 
\"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014330 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-system-cni-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014362 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014403 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-system-cni-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014430 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-kubelet\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014444 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-socket-dir-parent\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014453 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-etc-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014697 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-etc-kubernetes\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014910 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-os-release\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.014950 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-bin\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015073 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-cni-multus\") pod 
\"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015005 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/405aba30-0ff3-4fca-a5da-09c35263665d-mcd-auth-proxy-config\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015122 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015151 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-node-log\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015203 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-log-socket\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015228 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-systemd\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015253 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-netd\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015275 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-multus-certs\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015587 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cni-binary-copy\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015627 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-ovn-kubernetes\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 
14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015649 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-kubelet\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015670 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-netns\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015676 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/405aba30-0ff3-4fca-a5da-09c35263665d-rootfs\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015692 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-var-lib-openvswitch\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015726 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-daemon-config\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015876 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-cni-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015886 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-netns\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015905 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-run-k8s-cni-cncf-io\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015914 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015947 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-host-var-lib-cni-bin\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: 
I1002 14:21:12.015950 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015963 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-ovn\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015981 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015998 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015999 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-slash\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.016007 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.016026 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cnibin\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015952 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e1d9164c-6127-4f40-ae97-942e6cd0faf2-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.016027 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-systemd-units\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.016026 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-multus-conf-dir\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015999 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-os-release\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: 
\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.016063 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/424c679b-8db0-4ba4-9c8f-67a65fe38048-cnibin\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.015979 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-system-cni-dir\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.016190 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e1d9164c-6127-4f40-ae97-942e6cd0faf2-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.016360 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-config\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.016407 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-env-overrides\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.017556 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-script-lib\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.017796 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.019475 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/405aba30-0ff3-4fca-a5da-09c35263665d-proxy-tls\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.021344 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4de64e15-550a-4404-92fc-b355535a4bf2-ovn-node-metrics-cert\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.028228 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.028726 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d88cl\" (UniqueName: \"kubernetes.io/projected/424c679b-8db0-4ba4-9c8f-67a65fe38048-kube-api-access-d88cl\") pod \"multus-s7n7q\" (UID: \"424c679b-8db0-4ba4-9c8f-67a65fe38048\") " pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.030355 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns4hg\" (UniqueName: \"kubernetes.io/projected/e1d9164c-6127-4f40-ae97-942e6cd0faf2-kube-api-access-ns4hg\") pod \"multus-additional-cni-plugins-kwkj8\" (UID: \"e1d9164c-6127-4f40-ae97-942e6cd0faf2\") " pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.030525 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v64t\" (UniqueName: \"kubernetes.io/projected/405aba30-0ff3-4fca-a5da-09c35263665d-kube-api-access-7v64t\") pod \"machine-config-daemon-sk55f\" (UID: \"405aba30-0ff3-4fca-a5da-09c35263665d\") " pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.033127 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w94qv\" (UniqueName: \"kubernetes.io/projected/4de64e15-550a-4404-92fc-b355535a4bf2-kube-api-access-w94qv\") pod \"ovnkube-node-l7nn7\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.038422 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.046923 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.056532 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.073848 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.082043 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.087165 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-5ljkq" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.092461 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.094861 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.101125 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: W1002 14:21:12.101318 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod328e8bee_9892_4374_8985_28ac6cb2d377.slice/crio-ef24631aaae9891003231eadbc7ebe8e7628c81a942a4a0f3ca3b57d186e24c1 WatchSource:0}: Error finding container ef24631aaae9891003231eadbc7ebe8e7628c81a942a4a0f3ca3b57d186e24c1: Status 404 returned error can't find the container with id ef24631aaae9891003231eadbc7ebe8e7628c81a942a4a0f3ca3b57d186e24c1 Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.111056 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.122002 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.123909 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.123944 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.123953 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.123966 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.123974 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.132223 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.140315 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.150188 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.161698 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.173016 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.181444 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.193148 4717 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.197049 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.203745 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.215327 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.217101 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-s7n7q" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.225742 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.225781 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.225813 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.225842 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.225883 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.232178 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:12 crc kubenswrapper[4717]: W1002 14:21:12.233921 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod424c679b_8db0_4ba4_9c8f_67a65fe38048.slice/crio-a2bd1e53c4b9cbc625e6f54ab987dad2d5e5a847d5c9f8cc93b006680cd57784 WatchSource:0}: Error finding container a2bd1e53c4b9cbc625e6f54ab987dad2d5e5a847d5c9f8cc93b006680cd57784: Status 404 returned error can't find the container with id a2bd1e53c4b9cbc625e6f54ab987dad2d5e5a847d5c9f8cc93b006680cd57784 Oct 02 14:21:12 crc kubenswrapper[4717]: W1002 14:21:12.249520 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4de64e15_550a_4404_92fc_b355535a4bf2.slice/crio-dd54b5ec48286ed15d1a837e7d32fc8ea690bdc3b795510005c7abd52f66009b WatchSource:0}: Error finding container dd54b5ec48286ed15d1a837e7d32fc8ea690bdc3b795510005c7abd52f66009b: Status 404 returned error can't find the container with id dd54b5ec48286ed15d1a837e7d32fc8ea690bdc3b795510005c7abd52f66009b Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.254600 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" Oct 02 14:21:12 crc kubenswrapper[4717]: W1002 14:21:12.283210 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1d9164c_6127_4f40_ae97_942e6cd0faf2.slice/crio-f795a46d9fac832829f9b638dc3743e60f122b58991465f49f003219a213e47f WatchSource:0}: Error finding container f795a46d9fac832829f9b638dc3743e60f122b58991465f49f003219a213e47f: Status 404 returned error can't find the container with id f795a46d9fac832829f9b638dc3743e60f122b58991465f49f003219a213e47f Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.328866 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.328907 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.328919 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.329064 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.329084 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.417819 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.417915 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.417963 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.417987 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:12 crc 
kubenswrapper[4717]: I1002 14:21:12.418027 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418156 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418188 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418199 4717 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418261 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:13.418234105 +0000 UTC m=+24.270088551 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418316 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:21:13.418309827 +0000 UTC m=+24.270164273 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418374 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418385 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418393 4717 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418439 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:13.418433001 +0000 UTC m=+24.270287437 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418501 4717 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418524 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:13.418518424 +0000 UTC m=+24.270372870 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418557 4717 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:12 crc kubenswrapper[4717]: E1002 14:21:12.418597 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:13.418591126 +0000 UTC m=+24.270445572 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.431238 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.431299 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.431309 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.431326 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.431359 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.533777 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.533814 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.533827 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.533842 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.533851 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.636491 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.636524 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.636532 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.636554 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.636564 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.738960 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.739011 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.739023 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.739053 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.739066 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.841370 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.841415 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.841425 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.841442 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.841454 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.844773 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.846319 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.847195 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.848647 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.849439 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.850350 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.851217 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.851718 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.852695 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.853203 4717 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.854081 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.854724 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.855748 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.856277 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.857141 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.857640 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.858226 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.859174 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.859984 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.860564 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.862339 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.862881 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.863422 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.865364 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.865768 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.867010 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.867669 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.868621 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.869301 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.870150 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.870590 4717 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.870685 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.872757 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.873257 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.873662 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.876482 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.877760 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.878615 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.879732 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.880503 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.881837 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.882533 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.883538 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.884126 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.889586 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.890215 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.891241 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.891962 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.892850 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.893487 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.894546 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.895109 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.895717 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.896803 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.944303 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.944340 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.944351 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.944367 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.944378 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:12Z","lastTransitionTime":"2025-10-02T14:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.957556 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s7n7q" event={"ID":"424c679b-8db0-4ba4-9c8f-67a65fe38048","Type":"ContainerStarted","Data":"0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.957605 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s7n7q" event={"ID":"424c679b-8db0-4ba4-9c8f-67a65fe38048","Type":"ContainerStarted","Data":"a2bd1e53c4b9cbc625e6f54ab987dad2d5e5a847d5c9f8cc93b006680cd57784"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.959274 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.959296 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.959306 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"8a242f2e51b4fc637835c659ec917e5e8ba1dc712513b6bc9f08ab861b2b8e9e"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.961420 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.961440 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.962860 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e" exitCode=0 Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.962897 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.962911 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"dd54b5ec48286ed15d1a837e7d32fc8ea690bdc3b795510005c7abd52f66009b"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.964922 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-5ljkq" event={"ID":"328e8bee-9892-4374-8985-28ac6cb2d377","Type":"ContainerStarted","Data":"9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.964959 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-5ljkq" event={"ID":"328e8bee-9892-4374-8985-28ac6cb2d377","Type":"ContainerStarted","Data":"ef24631aaae9891003231eadbc7ebe8e7628c81a942a4a0f3ca3b57d186e24c1"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.967102 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.969181 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.969503 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.970600 4717 generic.go:334] "Generic (PLEG): container finished" podID="e1d9164c-6127-4f40-ae97-942e6cd0faf2" containerID="7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a" exitCode=0 Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.970693 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerDied","Data":"7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.970742 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerStarted","Data":"f795a46d9fac832829f9b638dc3743e60f122b58991465f49f003219a213e47f"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.972718 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.972832 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"eeeeac05ad06feef4040a94585e5d612e09985d8bc23ca2b4167d91d523709a9"} Oct 02 14:21:12 crc kubenswrapper[4717]: I1002 14:21:12.973748 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9eac1a5cbf2688dd11eac42c3cac568997eab4d2df2550f880ee59e35dd9e7ef"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.022729 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.046801 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.046835 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 
14:21:13.046844 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.046856 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.046865 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.051316 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.067631 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.083337 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.101484 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.117366 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.132248 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.145849 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.148480 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.148507 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.148515 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.148527 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.148536 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.160944 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.173954 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.193290 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02
T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.206300 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.254268 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.254305 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.254313 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.254329 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.254338 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.259415 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.272593 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.286992 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.300796 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.311573 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.324710 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.341456 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z 
is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.352885 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.356218 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.356266 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.356277 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.356293 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.356307 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.367779 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.376512 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.390865 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.400513 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.411377 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.426184 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.453532 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.453641 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453658 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:21:15.453640246 +0000 UTC m=+26.305494692 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.453685 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.453709 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.453746 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453796 4717 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 
14:21:13.453815 4717 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453840 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:15.45382878 +0000 UTC m=+26.305683226 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453853 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:15.453847441 +0000 UTC m=+26.305701877 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453870 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453881 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453883 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453921 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453892 4717 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453959 4717 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.453984 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr 
podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:15.453978024 +0000 UTC m=+26.305832470 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.454014 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:15.453997725 +0000 UTC m=+26.305852171 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.459148 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.459172 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.459184 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.459197 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.459206 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.561256 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.561289 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.561298 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.561310 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.561319 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.663791 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.663841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.663859 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.663879 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.663897 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.765890 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.765923 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.765959 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.765977 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.765990 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.838070 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.838102 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.838102 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.838199 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.838325 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:13 crc kubenswrapper[4717]: E1002 14:21:13.838440 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.869245 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.869293 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.869307 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.869325 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.869337 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.971907 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.971956 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.971964 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.971977 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.971987 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:13Z","lastTransitionTime":"2025-10-02T14:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.978756 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerStarted","Data":"35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.981893 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.981947 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.981957 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.981966 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.981976 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec"} Oct 02 14:21:13 crc kubenswrapper[4717]: I1002 14:21:13.994075 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.007950 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.020137 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.032492 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.045983 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.062430 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z 
is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.074599 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.074635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.074646 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.074661 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.074670 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.075531 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.102426 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.123864 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.147048 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.158886 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.170444 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.177237 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.177270 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.177281 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.177296 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.177306 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.184865 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:14Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.279532 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.279745 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.279827 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.279894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.280003 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.382879 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.382923 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.382950 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.382972 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.382984 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.485693 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.485893 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.485971 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.486062 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.486120 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.587825 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.588035 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.588099 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.588164 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.588244 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.690738 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.690785 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.690796 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.690813 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.690828 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.793134 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.793182 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.793193 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.793210 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.793221 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.895180 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.895222 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.895230 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.895245 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.895258 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.989336 4717 generic.go:334] "Generic (PLEG): container finished" podID="e1d9164c-6127-4f40-ae97-942e6cd0faf2" containerID="35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac" exitCode=0 Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.989649 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerDied","Data":"35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.993722 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.995169 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a"} Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.998002 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.998044 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.998069 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.998098 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:14 crc kubenswrapper[4717]: I1002 14:21:14.998108 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:14Z","lastTransitionTime":"2025-10-02T14:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.005375 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.019385 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.032107 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.042068 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.056092 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.081171 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.100300 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.102769 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.102817 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.102828 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.102844 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.102855 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.124282 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.137344 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\
\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.151605 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.166858 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.183648 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z 
is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.194419 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.206034 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.206061 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.206069 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.206081 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.206090 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.208006 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.220146 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.232752 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.245716 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.255589 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.266882 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.289155 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.302201 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.307887 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.307926 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.307960 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.307976 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.307986 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.318027 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.336635 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.347597 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.360475 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.370928 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:15Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.411292 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.411408 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.411427 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.411452 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.411470 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.474462 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.474595 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.474631 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.474660 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.474688 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.474772 4717 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.474825 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:19.474809861 +0000 UTC m=+30.326664327 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.474887 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:21:19.474878183 +0000 UTC m=+30.326732639 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475043 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475164 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475179 4717 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475113 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475236 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475248 4717 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475276 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:19.475266504 +0000 UTC m=+30.327120970 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475118 4717 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475307 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-02 14:21:19.475298435 +0000 UTC m=+30.327152891 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.475331 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:19.475324035 +0000 UTC m=+30.327178501 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.514082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.514119 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.514130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.514141 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.514150 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.616219 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.616250 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.616259 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.616271 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.616280 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.718516 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.718548 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.718557 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.718569 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.718578 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.821082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.821124 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.821133 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.821150 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.821161 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.838146 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.838180 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.838234 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.838276 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.838356 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:15 crc kubenswrapper[4717]: E1002 14:21:15.838415 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.923505 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.923555 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.923571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.923593 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:15 crc kubenswrapper[4717]: I1002 14:21:15.923610 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:15Z","lastTransitionTime":"2025-10-02T14:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.006962 4717 generic.go:334] "Generic (PLEG): container finished" podID="e1d9164c-6127-4f40-ae97-942e6cd0faf2" containerID="63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483" exitCode=0 Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.007458 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerDied","Data":"63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.019877 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.025903 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.026069 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.026085 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.026106 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.026121 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.033053 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.045253 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.058304 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.071542 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.085326 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.097800 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.108352 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.122166 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.128262 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.128305 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.128316 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.128333 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.128345 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.133873 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.145810 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.164896 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.176176 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:16Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.231031 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.231052 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.231060 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.231072 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.231081 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.333717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.333757 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.333783 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.333800 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.333809 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.436077 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.436122 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.436133 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.436155 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.436166 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.538064 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.538109 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.538120 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.538136 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.538149 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.641177 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.641218 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.641230 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.641248 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.641262 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.743776 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.744121 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.744135 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.744154 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.744165 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.845623 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.845666 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.845678 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.845693 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.845702 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.947664 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.947701 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.947710 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.947726 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:16 crc kubenswrapper[4717]: I1002 14:21:16.947736 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:16Z","lastTransitionTime":"2025-10-02T14:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.021173 4717 generic.go:334] "Generic (PLEG): container finished" podID="e1d9164c-6127-4f40-ae97-942e6cd0faf2" containerID="46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a" exitCode=0 Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.021250 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerDied","Data":"46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.032842 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.038983 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mo
untPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.049633 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.049670 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.049684 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.049700 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.049712 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.059321 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:
21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.073417 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.086159 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.095971 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.106155 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.123252 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.136897 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.148337 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.152230 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.152269 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.152280 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.152298 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.152311 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.157874 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.167556 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.182521 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.194725 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:17Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.254968 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.254999 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.255007 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.255019 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.255027 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.357459 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.357513 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.357527 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.357544 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.357556 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.460363 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.460423 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.460441 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.460465 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.460483 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.564118 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.564156 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.564167 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.564183 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.564194 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.666929 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.666991 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.667002 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.667020 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.667032 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.769331 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.769381 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.769396 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.769416 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.769446 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.838321 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:17 crc kubenswrapper[4717]: E1002 14:21:17.838521 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.838566 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.838618 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:17 crc kubenswrapper[4717]: E1002 14:21:17.838670 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:17 crc kubenswrapper[4717]: E1002 14:21:17.838797 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.872375 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.872421 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.872432 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.872446 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.872457 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.976117 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.976188 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.976207 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.976231 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:17 crc kubenswrapper[4717]: I1002 14:21:17.976251 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:17Z","lastTransitionTime":"2025-10-02T14:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.041329 4717 generic.go:334] "Generic (PLEG): container finished" podID="e1d9164c-6127-4f40-ae97-942e6cd0faf2" containerID="99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c" exitCode=0 Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.041398 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerDied","Data":"99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.060047 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.076317 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.078831 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.078919 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.078977 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.079013 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.079039 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.097917 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.117201 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.138433 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.156593 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.172781 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.184102 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.184141 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.184152 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.184169 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.184182 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.191794 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.210335 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\
":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f1
28f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.224556 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.243839 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.262271 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.282136 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.287062 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.287107 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.287118 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.287136 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.287148 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.389818 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.389848 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.389856 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.389871 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.389880 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.493174 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.493214 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.493226 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.493240 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.493249 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.536867 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-vnsql"] Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.537321 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.540547 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.540730 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.540917 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.541775 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.552017 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.563969 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.576329 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.605805 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.605852 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.605863 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.605899 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.605915 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.619792 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c06373396
5e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.638095 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.654300 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.668033 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.677848 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.689470 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.699591 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.705170 4717 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/58331ba8-4c83-4483-9d69-fb5c4b271c79-serviceca\") pod \"node-ca-vnsql\" (UID: \"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.705212 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/58331ba8-4c83-4483-9d69-fb5c4b271c79-host\") pod \"node-ca-vnsql\" (UID: \"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.705228 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvr8x\" (UniqueName: \"kubernetes.io/projected/58331ba8-4c83-4483-9d69-fb5c4b271c79-kube-api-access-gvr8x\") pod \"node-ca-vnsql\" (UID: \"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.708283 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.708316 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.708326 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.708339 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.708349 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.711657 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.723924 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.741245 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.751159 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:18Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.805771 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/58331ba8-4c83-4483-9d69-fb5c4b271c79-serviceca\") pod \"node-ca-vnsql\" (UID: \"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.805816 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/58331ba8-4c83-4483-9d69-fb5c4b271c79-host\") pod \"node-ca-vnsql\" (UID: \"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.805841 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvr8x\" (UniqueName: \"kubernetes.io/projected/58331ba8-4c83-4483-9d69-fb5c4b271c79-kube-api-access-gvr8x\") pod \"node-ca-vnsql\" (UID: 
\"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.805920 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/58331ba8-4c83-4483-9d69-fb5c4b271c79-host\") pod \"node-ca-vnsql\" (UID: \"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.806887 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/58331ba8-4c83-4483-9d69-fb5c4b271c79-serviceca\") pod \"node-ca-vnsql\" (UID: \"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.810690 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.810718 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.810731 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.810746 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.810756 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.820866 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvr8x\" (UniqueName: \"kubernetes.io/projected/58331ba8-4c83-4483-9d69-fb5c4b271c79-kube-api-access-gvr8x\") pod \"node-ca-vnsql\" (UID: \"58331ba8-4c83-4483-9d69-fb5c4b271c79\") " pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.849191 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-vnsql" Oct 02 14:21:18 crc kubenswrapper[4717]: W1002 14:21:18.860168 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58331ba8_4c83_4483_9d69_fb5c4b271c79.slice/crio-32fc53f631fb1acf7c030ba1898b3e5a6cbea7dcd2130c02309677ab28068e3b WatchSource:0}: Error finding container 32fc53f631fb1acf7c030ba1898b3e5a6cbea7dcd2130c02309677ab28068e3b: Status 404 returned error can't find the container with id 32fc53f631fb1acf7c030ba1898b3e5a6cbea7dcd2130c02309677ab28068e3b Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.915096 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.915126 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.915135 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.915150 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:18 crc kubenswrapper[4717]: I1002 14:21:18.915161 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:18Z","lastTransitionTime":"2025-10-02T14:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.023741 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.025442 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.025454 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.025466 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.025475 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.044675 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vnsql" event={"ID":"58331ba8-4c83-4483-9d69-fb5c4b271c79","Type":"ContainerStarted","Data":"32fc53f631fb1acf7c030ba1898b3e5a6cbea7dcd2130c02309677ab28068e3b"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.048133 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.048377 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.048528 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.053342 4717 generic.go:334] "Generic (PLEG): container finished" podID="e1d9164c-6127-4f40-ae97-942e6cd0faf2" containerID="71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137" exitCode=0 Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.053387 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerDied","Data":"71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.062184 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.072605 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.072925 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.073202 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.088337 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\
\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.105140 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-0
2T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\
\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswit
ch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.117358 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"na
me\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.127565 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.127618 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.127630 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.127645 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.127654 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.129834 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.142616 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.151115 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.160439 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.169511 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.182453 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.193092 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.204491 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.212573 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.220295 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.229648 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.230402 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.230440 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.230451 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.230466 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.230476 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.238376 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.247974 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.262006 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.270822 4717 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 
14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.280696 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.293712 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.305669 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.315607 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.326897 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.333001 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.333052 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.333065 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.333082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.333094 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.340232 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.351692 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.365714 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:19Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.435837 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.435872 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc 
kubenswrapper[4717]: I1002 14:21:19.435882 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.435896 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.435906 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.512679 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.513038 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.513091 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.513127 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513169 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:21:27.513143267 +0000 UTC m=+38.364997713 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.513199 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513231 4717 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513286 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:27.51327276 +0000 UTC m=+38.365127216 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513315 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513353 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513376 4717 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513536 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513550 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513559 4717 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 
14:21:19.513588 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:27.513580558 +0000 UTC m=+38.365435004 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513631 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:27.513611039 +0000 UTC m=+38.365465525 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513755 4717 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.513801 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:27.513786204 +0000 UTC m=+38.365640650 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.538272 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.538329 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.538342 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.538360 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.538372 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.640872 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.640914 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.640926 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.640963 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.640974 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.743964 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.744007 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.744019 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.744038 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.744049 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.837799 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.837826 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.837973 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.838143 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.838313 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:19 crc kubenswrapper[4717]: E1002 14:21:19.838457 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.845866 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.846000 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.846092 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.846183 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.846270 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.948772 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.948802 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.948810 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.948822 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:19 crc kubenswrapper[4717]: I1002 14:21:19.948830 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:19Z","lastTransitionTime":"2025-10-02T14:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.051998 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.052245 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.052336 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.052404 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.052461 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.058451 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vnsql" event={"ID":"58331ba8-4c83-4483-9d69-fb5c4b271c79","Type":"ContainerStarted","Data":"bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.062876 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" event={"ID":"e1d9164c-6127-4f40-ae97-942e6cd0faf2","Type":"ContainerStarted","Data":"2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.062923 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.072431 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.086071 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.097379 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.110253 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.127224 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94
df30dfdf09496ac79b709199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.138793 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.151854 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.154742 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.154781 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.154792 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.154828 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.154838 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.163653 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.174182 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.185700 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.196344 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.208232 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.220286 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.234589 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.247516 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.258653 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.258905 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.259042 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.259159 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.259245 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.262146 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.274653 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.299309 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.315293 4717 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9a
e47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.330403 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.348066 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.361051 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.366228 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.366280 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.366292 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.366311 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.366323 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.374436 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.391228 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.403563 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.416513 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.431347 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.442023 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.468964 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.468998 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.469009 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.469023 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.469034 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.572217 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.572296 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.572308 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.572323 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.572335 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.674893 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.674925 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.674951 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.674965 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.674976 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.777352 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.777379 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.777388 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.777401 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.777409 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.855055 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.868926 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.879463 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.879487 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.879495 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.879509 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.879520 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.883337 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.893622 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.910390 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.923456 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.935417 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.944983 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.956292 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.964946 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.975896 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.982596 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.982627 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.982635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.982650 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.982658 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:20Z","lastTransitionTime":"2025-10-02T14:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:20 crc kubenswrapper[4717]: I1002 14:21:20.987628 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.000841 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.017238 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\
\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:21Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.071804 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:21:21 crc 
kubenswrapper[4717]: I1002 14:21:21.085256 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.085320 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.085339 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.085364 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.085383 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.188021 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.188068 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.188077 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.188092 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.188102 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.290400 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.290444 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.290454 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.290476 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.290495 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.393239 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.393315 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.393333 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.393359 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.393380 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.495970 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.496026 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.496041 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.496062 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.496078 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.598549 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.598619 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.598629 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.598652 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.598662 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.700774 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.700816 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.700866 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.700881 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.700891 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.803717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.803768 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.803777 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.803791 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.803801 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.829301 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.829340 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.829351 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.829369 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.829377 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.838866 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.838907 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.838991 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.839031 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.839179 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.839300 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.843579 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:21Z is after 
2025-08-24T17:21:41Z" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.848352 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.848418 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.848434 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.848454 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.848468 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.863335 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:21Z is after 
2025-08-24T17:21:41Z" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.866844 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.866875 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.866885 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.866903 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.866912 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.879145 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:21Z is after 
2025-08-24T17:21:41Z" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.882510 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.882553 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.882562 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.882578 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.882587 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.894363 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:21Z is after 
2025-08-24T17:21:41Z" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.898685 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.898750 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.898761 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.898775 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.898785 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.910658 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:21Z is after 
2025-08-24T17:21:41Z" Oct 02 14:21:21 crc kubenswrapper[4717]: E1002 14:21:21.910769 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.912345 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.912379 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.912429 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.912446 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:21 crc kubenswrapper[4717]: I1002 14:21:21.912457 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:21Z","lastTransitionTime":"2025-10-02T14:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.014706 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.014743 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.014751 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.014764 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.014772 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.116906 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.116957 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.116967 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.116979 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.116990 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.218968 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.219013 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.219023 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.219038 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.219051 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.321415 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.321456 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.321473 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.321491 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.321508 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.423711 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.423759 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.423773 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.423792 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.423804 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.527214 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.527279 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.527296 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.527319 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.527335 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.629804 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.629865 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.629883 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.629908 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.629925 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.731787 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.731832 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.731842 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.731856 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.731877 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.833502 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.833552 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.833564 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.833585 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.833610 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.936846 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.936908 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.936962 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.936992 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:22 crc kubenswrapper[4717]: I1002 14:21:22.937011 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:22Z","lastTransitionTime":"2025-10-02T14:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.039523 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.039569 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.039580 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.039595 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.039607 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.079844 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/0.log" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.083177 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199" exitCode=1 Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.083231 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.083963 4717 scope.go:117] "RemoveContainer" containerID="277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.107918 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.121908 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.136550 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.141822 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.141860 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.141869 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.141905 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.141918 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.152188 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"moun
tPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.172710 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94
df30dfdf09496ac79b709199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:22Z\\\",\\\"message\\\":\\\") from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:21:21.876368 5947 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1002 14:21:21.876442 5947 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:21:21.876476 5947 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:21:21.876477 5947 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 14:21:21.876495 5947 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:21:21.876532 5947 factory.go:656] Stopping watch factory\\\\nI1002 14:21:21.876568 5947 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:21:21.876604 5947 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:21:21.876624 5947 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 14:21:21.876673 5947 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:21:21.876686 5947 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 14:21:21.876787 5947 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa4
70a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.184440 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.197125 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.207657 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.215885 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.224795 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.233769 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.244285 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.244327 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.244338 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.244354 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.244367 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.246345 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.257563 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.269610 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.347783 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.347831 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc 
kubenswrapper[4717]: I1002 14:21:23.347847 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.347870 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.347890 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.450504 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.450544 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.450555 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.450571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.450585 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.553074 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.553112 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.553120 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.553134 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.553143 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.654835 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.654881 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.654893 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.654909 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.654920 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.756739 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.756765 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.756772 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.756784 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.756793 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.838634 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:23 crc kubenswrapper[4717]: E1002 14:21:23.838750 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.838809 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:23 crc kubenswrapper[4717]: E1002 14:21:23.838862 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.838901 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:23 crc kubenswrapper[4717]: E1002 14:21:23.838973 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.860206 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.860253 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.860274 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.860305 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.860331 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.963416 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.963470 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.963484 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.963503 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:23 crc kubenswrapper[4717]: I1002 14:21:23.963517 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:23Z","lastTransitionTime":"2025-10-02T14:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.066724 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.066788 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.066807 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.066838 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.066863 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.091318 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/0.log" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.095927 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.125716 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24"] Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.126429 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: W1002 14:21:24.128650 4717 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert": failed to list *v1.Secret: secrets "ovn-control-plane-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Oct 02 14:21:24 crc kubenswrapper[4717]: E1002 14:21:24.128809 4717 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-control-plane-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-control-plane-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 02 14:21:24 crc kubenswrapper[4717]: W1002 14:21:24.129091 4717 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd": failed to list *v1.Secret: secrets "ovn-kubernetes-control-plane-dockercfg-gs7dd" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Oct 02 14:21:24 crc kubenswrapper[4717]: E1002 14:21:24.129192 4717 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-kubernetes-control-plane-dockercfg-gs7dd\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-kubernetes-control-plane-dockercfg-gs7dd\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.149686 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.168338 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.169323 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.169346 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.169355 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.169372 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.169383 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.182110 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.198367 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: 
I1002 14:21:24.210108 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.222762 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.236847 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.254372 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:22Z\\\",\\\"message\\\":\\\") from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:21:21.876368 5947 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1002 14:21:21.876442 5947 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:21:21.876476 5947 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:21:21.876477 5947 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 14:21:21.876495 5947 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:21:21.876532 5947 factory.go:656] Stopping watch factory\\\\nI1002 14:21:21.876568 5947 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:21:21.876604 5947 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:21:21.876624 5947 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 14:21:21.876673 5947 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:21:21.876686 5947 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 14:21:21.876787 5947 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa4
70a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.259288 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.259341 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfbkw\" (UniqueName: \"kubernetes.io/projected/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-kube-api-access-lfbkw\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.259366 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.259403 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.269341 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.271132 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.271162 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.271173 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.271190 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.271201 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.285406 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.295571 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.306447 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.316973 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.332786 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.352541 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:24Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.359773 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.359850 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.359884 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfbkw\" (UniqueName: \"kubernetes.io/projected/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-kube-api-access-lfbkw\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.359906 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.360399 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.360557 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.373511 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.373555 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.373571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.373591 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc 
kubenswrapper[4717]: I1002 14:21:24.373605 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.387257 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfbkw\" (UniqueName: \"kubernetes.io/projected/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-kube-api-access-lfbkw\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.476176 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.476255 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.476267 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.476282 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.476292 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.578797 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.578828 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.578836 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.578847 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.578858 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.681338 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.681388 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.681403 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.681422 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.681437 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.784426 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.784496 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.784515 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.784548 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.784571 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.886862 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.886955 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.886968 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.886986 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.886997 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.990174 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.990256 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.990275 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.990301 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:24 crc kubenswrapper[4717]: I1002 14:21:24.990317 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:24Z","lastTransitionTime":"2025-10-02T14:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.093402 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.093474 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.093502 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.093540 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.093569 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.099265 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.120701 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7
814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.135686 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.153060 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.164509 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.165821 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.174488 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" 
(UniqueName: \"kubernetes.io/secret/3a32c8d3-7320-4eb1-abfb-bbcf19602b23-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pjj24\" (UID: \"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.177068 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.197141 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.197225 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.197236 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc 
kubenswrapper[4717]: I1002 14:21:25.197255 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.197267 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.197777 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.208987 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.222391 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.239436 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:22Z\\\",\\\"message\\\":\\\") from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:21:21.876368 5947 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1002 14:21:21.876442 5947 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:21:21.876476 5947 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:21:21.876477 5947 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 14:21:21.876495 5947 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:21:21.876532 5947 factory.go:656] Stopping watch factory\\\\nI1002 14:21:21.876568 5947 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:21:21.876604 5947 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:21:21.876624 5947 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 14:21:21.876673 5947 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:21:21.876686 5947 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 14:21:21.876787 5947 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.250843 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.264471 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.275858 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.289592 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.302262 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.302314 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.302325 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.302339 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.302351 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.303796 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.315184 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.405097 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.405315 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.405386 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.405467 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.405551 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.509507 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.509564 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.509583 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.509609 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.509628 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.607972 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.609824 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.612601 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.612713 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.612784 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.612820 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.612889 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.617722 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-7v6wt"] Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.618539 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:25 crc kubenswrapper[4717]: E1002 14:21:25.618636 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.633559 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.656271 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.670609 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.687868 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.708433 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.716044 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.716117 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.716143 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.716175 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.716200 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.722059 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.740080 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc
26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" 
not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.751577 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.769995 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.777147 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kgf6\" (UniqueName: \"kubernetes.io/projected/36c8fcb0-a074-461c-a5d1-c01106ee4997-kube-api-access-8kgf6\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " 
pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.777286 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.782581 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.794321 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: W1002 14:21:25.808336 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a32c8d3_7320_4eb1_abfb_bbcf19602b23.slice/crio-a98bdb158f1c08856e5d8ae4b1454ead4931e2cfec9388c35079e65f9e593efa WatchSource:0}: Error finding 
container a98bdb158f1c08856e5d8ae4b1454ead4931e2cfec9388c35079e65f9e593efa: Status 404 returned error can't find the container with id a98bdb158f1c08856e5d8ae4b1454ead4931e2cfec9388c35079e65f9e593efa Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.809029 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.819238 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.819276 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.819287 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.819301 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.819313 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.830498 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.838640 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:25 crc kubenswrapper[4717]: E1002 14:21:25.838794 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.838646 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:25 crc kubenswrapper[4717]: E1002 14:21:25.839070 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.839085 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:25 crc kubenswrapper[4717]: E1002 14:21:25.839398 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.850089 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.
io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.870895 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d
3375dfc35c3394e301f02fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:22Z\\\",\\\"message\\\":\\\") from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:21:21.876368 5947 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1002 14:21:21.876442 5947 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:21:21.876476 5947 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:21:21.876477 5947 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 14:21:21.876495 5947 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:21:21.876532 5947 factory.go:656] Stopping watch factory\\\\nI1002 14:21:21.876568 5947 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:21:21.876604 5947 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:21:21.876624 5947 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 14:21:21.876673 5947 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:21:21.876686 5947 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 14:21:21.876787 5947 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.878840 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kgf6\" (UniqueName: \"kubernetes.io/projected/36c8fcb0-a074-461c-a5d1-c01106ee4997-kube-api-access-8kgf6\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.878993 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:25 crc kubenswrapper[4717]: E1002 14:21:25.879168 4717 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:25 crc kubenswrapper[4717]: E1002 14:21:25.879228 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs podName:36c8fcb0-a074-461c-a5d1-c01106ee4997 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:26.379212519 +0000 UTC m=+37.231066985 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs") pod "network-metrics-daemon-7v6wt" (UID: "36c8fcb0-a074-461c-a5d1-c01106ee4997") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.883957 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:25Z is after 
2025-08-24T17:21:41Z" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.899589 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kgf6\" (UniqueName: \"kubernetes.io/projected/36c8fcb0-a074-461c-a5d1-c01106ee4997-kube-api-access-8kgf6\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.921461 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.921497 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.921505 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.921519 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:25 crc kubenswrapper[4717]: I1002 14:21:25.921529 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:25Z","lastTransitionTime":"2025-10-02T14:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.023394 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.023425 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.023433 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.023446 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.023455 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.102516 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" event={"ID":"3a32c8d3-7320-4eb1-abfb-bbcf19602b23","Type":"ContainerStarted","Data":"a98bdb158f1c08856e5d8ae4b1454ead4931e2cfec9388c35079e65f9e593efa"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.125499 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.125535 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.125546 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.125561 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.125574 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.227424 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.227458 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.227466 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.227481 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.227490 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.330118 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.330155 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.330165 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.330180 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.330189 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.383547 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:26 crc kubenswrapper[4717]: E1002 14:21:26.383675 4717 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:26 crc kubenswrapper[4717]: E1002 14:21:26.383729 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs podName:36c8fcb0-a074-461c-a5d1-c01106ee4997 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:27.383714408 +0000 UTC m=+38.235568854 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs") pod "network-metrics-daemon-7v6wt" (UID: "36c8fcb0-a074-461c-a5d1-c01106ee4997") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.432065 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.432116 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.432126 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.432141 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.432150 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.534536 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.534569 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.534577 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.534590 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.534599 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.637313 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.637356 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.637366 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.637381 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.637392 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.739109 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.739168 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.739182 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.739198 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.739210 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.841075 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.841127 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.841138 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.841150 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.841158 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.943199 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.943242 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.943254 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.943270 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:26 crc kubenswrapper[4717]: I1002 14:21:26.943281 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:26Z","lastTransitionTime":"2025-10-02T14:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.045109 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.045156 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.045167 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.045184 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.045195 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.106616 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" event={"ID":"3a32c8d3-7320-4eb1-abfb-bbcf19602b23","Type":"ContainerStarted","Data":"0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.147707 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.147793 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.147817 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.147841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.147861 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.250773 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.251113 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.251162 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.251180 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.251192 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.354024 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.354057 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.354065 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.354080 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.354088 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.393993 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.394170 4717 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.394235 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs podName:36c8fcb0-a074-461c-a5d1-c01106ee4997 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:29.394214793 +0000 UTC m=+40.246069259 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs") pod "network-metrics-daemon-7v6wt" (UID: "36c8fcb0-a074-461c-a5d1-c01106ee4997") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.455739 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.455780 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.455791 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.455805 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.455815 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.558645 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.558695 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.558708 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.558727 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.558740 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.596270 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.596448 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596469 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:21:43.596439704 +0000 UTC m=+54.448294170 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.596514 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.596563 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596599 4717 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596684 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:43.59666571 +0000 UTC m=+54.448520156 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596691 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596709 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596724 4717 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596729 4717 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.596618 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596767 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:43.596757332 +0000 UTC m=+54.448611788 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596848 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596887 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596902 4717 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.596858 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:43.596832924 +0000 UTC m=+54.448687480 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.597033 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:43.597008509 +0000 UTC m=+54.448863035 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.660955 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.661006 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.661022 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.661041 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.661054 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.764127 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.764182 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.764196 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.764216 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.764228 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.838117 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.838242 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.838431 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.838512 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.838707 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.838802 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.838967 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:27 crc kubenswrapper[4717]: E1002 14:21:27.839213 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.866181 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.866216 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.866228 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.866242 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.866261 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.968841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.968886 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.968899 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.968916 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:27 crc kubenswrapper[4717]: I1002 14:21:27.968949 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:27Z","lastTransitionTime":"2025-10-02T14:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.071329 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.071370 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.071381 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.071396 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.071405 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.110886 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/1.log" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.111555 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/0.log" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.114202 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2" exitCode=1 Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.114268 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.114322 4717 scope.go:117] "RemoveContainer" containerID="277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.115501 4717 scope.go:117] "RemoveContainer" containerID="d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2" Oct 02 14:21:28 crc kubenswrapper[4717]: E1002 14:21:28.115759 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.116697 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" event={"ID":"3a32c8d3-7320-4eb1-abfb-bbcf19602b23","Type":"ContainerStarted","Data":"2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.133311 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.149086 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.161725 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.174228 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.174313 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.174340 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.174348 4717 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.174364 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.174373 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.183693 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.194451 4717 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.204571 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.216563 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.235178 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:22Z\\\",\\\"message\\\":\\\") from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:21:21.876368 5947 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1002 14:21:21.876442 5947 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:21:21.876476 5947 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:21:21.876477 5947 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 14:21:21.876495 5947 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:21:21.876532 5947 factory.go:656] Stopping watch factory\\\\nI1002 14:21:21.876568 5947 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:21:21.876604 5947 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:21:21.876624 5947 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 14:21:21.876673 5947 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:21:21.876686 5947 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 14:21:21.876787 5947 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.245773 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.256785 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.267331 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.276469 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.276503 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.276514 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.276529 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.276540 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.278860 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.290326 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.307964 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.325446 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.344229 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.356793 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.368785 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.378404 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.378438 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc 
kubenswrapper[4717]: I1002 14:21:28.378446 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.378459 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.378469 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.381685 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:2
1:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.391484 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.400980 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.412335 4717 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.423453 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.433548 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.452059 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:22Z\\\",\\\"message\\\":\\\") from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:21:21.876368 5947 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1002 14:21:21.876442 5947 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:21:21.876476 5947 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:21:21.876477 5947 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 14:21:21.876495 5947 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:21:21.876532 5947 factory.go:656] Stopping watch factory\\\\nI1002 14:21:21.876568 5947 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:21:21.876604 5947 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:21:21.876624 5947 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 14:21:21.876673 5947 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:21:21.876686 5947 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 14:21:21.876787 5947 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.463410 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiv
eReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.475773 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8
d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.480607 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.480651 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.480663 4717 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.480679 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.480689 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.488403 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.499684 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.509738 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.521127 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:28Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.583542 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.583582 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.583594 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.583609 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.583620 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.685676 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.686198 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.686358 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.686496 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.686625 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.790603 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.790675 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.790694 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.790717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.790735 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.893050 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.893120 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.893139 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.893167 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.893186 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.996403 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.996466 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.996483 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.996507 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:28 crc kubenswrapper[4717]: I1002 14:21:28.996523 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:28Z","lastTransitionTime":"2025-10-02T14:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.099314 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.099383 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.099400 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.099427 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.099444 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.202137 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.202213 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.202235 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.202263 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.202280 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.305380 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.305461 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.305483 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.305516 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.305542 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.408329 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.408355 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.408364 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.408378 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.408389 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.415337 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:29 crc kubenswrapper[4717]: E1002 14:21:29.415487 4717 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:29 crc kubenswrapper[4717]: E1002 14:21:29.415526 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs podName:36c8fcb0-a074-461c-a5d1-c01106ee4997 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:33.415513782 +0000 UTC m=+44.267368228 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs") pod "network-metrics-daemon-7v6wt" (UID: "36c8fcb0-a074-461c-a5d1-c01106ee4997") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.511894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.511928 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.511955 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.511970 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.511978 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.614499 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.614539 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.614547 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.614562 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.614573 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.716636 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.716680 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.716688 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.716704 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.716714 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.755978 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.768692 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.781022 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.792415 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.804766 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.815849 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.818776 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.818799 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.818807 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.818819 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.818829 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.826456 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.838047 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.839347 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.839410 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.839440 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:29 crc kubenswrapper[4717]: E1002 14:21:29.839501 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:29 crc kubenswrapper[4717]: E1002 14:21:29.839566 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:29 crc kubenswrapper[4717]: E1002 14:21:29.839665 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.839955 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:29 crc kubenswrapper[4717]: E1002 14:21:29.840035 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.849979 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.863724 4717 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.873545 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 
14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.881875 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.897587 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:22Z\\\",\\\"message\\\":\\\") from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:21:21.876368 5947 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1002 14:21:21.876442 5947 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:21:21.876476 5947 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:21:21.876477 5947 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 14:21:21.876495 5947 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:21:21.876532 5947 factory.go:656] Stopping watch factory\\\\nI1002 14:21:21.876568 5947 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:21:21.876604 5947 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:21:21.876624 5947 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 14:21:21.876673 5947 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:21:21.876686 5947 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 14:21:21.876787 5947 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success 
event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/o
penshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.906738 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.917403 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.922294 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.922347 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.922356 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.922401 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.922415 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:29Z","lastTransitionTime":"2025-10-02T14:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.933694 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:29 crc kubenswrapper[4717]: I1002 14:21:29.945428 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:29Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.024416 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.024675 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.024750 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.024818 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.024886 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.124075 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/1.log" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.127668 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.127696 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.127707 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.127721 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.127732 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.229734 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.229765 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.229774 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.229787 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.229810 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.331564 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.331839 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.331951 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.332033 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.332111 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.434271 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.434317 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.434328 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.434345 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.434356 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.537258 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.537533 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.537596 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.537700 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.537788 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.641532 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.641585 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.641600 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.641625 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.641641 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.744814 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.744859 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.744873 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.744892 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.744906 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.847521 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.847560 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.847570 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.847583 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.847594 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.854067 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.866681 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.880341 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.914045 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://277939e1637c333d639a2c04901e078bddfa8b94df30dfdf09496ac79b709199\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:22Z\\\",\\\"message\\\":\\\") from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:21:21.876368 5947 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1002 14:21:21.876442 5947 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:21:21.876476 5947 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:21:21.876477 5947 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1002 14:21:21.876495 5947 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:21:21.876532 5947 factory.go:656] Stopping watch factory\\\\nI1002 14:21:21.876568 5947 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:21:21.876604 5947 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:21:21.876624 5947 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1002 14:21:21.876673 5947 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:21:21.876686 5947 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1002 14:21:21.876787 5947 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.932326 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.949407 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.950095 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.950216 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.950278 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.950350 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.950425 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:30Z","lastTransitionTime":"2025-10-02T14:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.962732 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.974276 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.984670 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:30 crc kubenswrapper[4717]: I1002 14:21:30.996687 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.005841 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.016191 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{
\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.028751 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\
\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.044347 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-c
onfig/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.052395 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.052700 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.052788 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.052855 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.052922 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.058222 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.070653 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.155702 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.155745 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.155755 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.155770 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.155780 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.258524 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.258567 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.258578 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.258599 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.258610 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.360802 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.360837 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.360845 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.360858 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.360867 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.463412 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.463456 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.463468 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.463484 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.463497 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.566402 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.566475 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.566488 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.566505 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.566519 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.668056 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.668269 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.668374 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.668500 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.668591 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.771078 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.771429 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.771659 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.771922 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.772183 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.838109 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:31 crc kubenswrapper[4717]: E1002 14:21:31.838224 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.838467 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.838499 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:31 crc kubenswrapper[4717]: E1002 14:21:31.838737 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.838542 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:31 crc kubenswrapper[4717]: E1002 14:21:31.838818 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:31 crc kubenswrapper[4717]: E1002 14:21:31.838666 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.874588 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.874762 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.874904 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.875079 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.875253 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.978274 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.978505 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.978574 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.978640 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:31 crc kubenswrapper[4717]: I1002 14:21:31.978699 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:31Z","lastTransitionTime":"2025-10-02T14:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.016091 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.016298 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.016391 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.016455 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.016520 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: E1002 14:21:32.031287 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:32Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.035424 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.035465 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.035478 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.035493 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.035504 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: E1002 14:21:32.049099 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:32Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.053139 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.053312 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.053446 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.053571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.053656 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: E1002 14:21:32.066182 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:32Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.070686 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.070864 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.071017 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.071118 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.071200 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: E1002 14:21:32.090126 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:32Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.100212 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.100254 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.100269 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.100286 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.100296 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: E1002 14:21:32.114617 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:32Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:32 crc kubenswrapper[4717]: E1002 14:21:32.115004 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.116800 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.116947 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.117038 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.117100 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.117161 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.220787 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.221124 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.221240 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.221563 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.221707 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.324635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.324671 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.324678 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.324691 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.324701 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.427376 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.427411 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.427422 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.427436 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.427446 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.529913 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.529979 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.529990 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.530007 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.530018 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.632554 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.632597 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.632610 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.632628 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.632642 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.734716 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.734770 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.734787 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.734810 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.734825 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.837031 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.837106 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.837116 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.837129 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.837138 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.939736 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.939771 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.939780 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.939793 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:32 crc kubenswrapper[4717]: I1002 14:21:32.939802 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:32Z","lastTransitionTime":"2025-10-02T14:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.041841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.041883 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.041895 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.041912 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.041922 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.143886 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.144041 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.144060 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.144082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.144101 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.246799 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.246858 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.246867 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.246884 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.246898 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.349589 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.349647 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.349663 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.349688 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.349704 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.451855 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.451926 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.451979 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.452010 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.452032 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.454409 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:33 crc kubenswrapper[4717]: E1002 14:21:33.454597 4717 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:33 crc kubenswrapper[4717]: E1002 14:21:33.454708 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs podName:36c8fcb0-a074-461c-a5d1-c01106ee4997 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:41.45467571 +0000 UTC m=+52.306530206 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs") pod "network-metrics-daemon-7v6wt" (UID: "36c8fcb0-a074-461c-a5d1-c01106ee4997") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.555032 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.555100 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.555124 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.555154 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.555180 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.657374 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.657403 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.657413 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.657428 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.657438 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.759194 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.759236 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.759251 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.759267 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.759277 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.837884 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.837951 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.837904 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:33 crc kubenswrapper[4717]: E1002 14:21:33.838025 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.837883 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:33 crc kubenswrapper[4717]: E1002 14:21:33.838094 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:33 crc kubenswrapper[4717]: E1002 14:21:33.838135 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:33 crc kubenswrapper[4717]: E1002 14:21:33.838184 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.861557 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.861612 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.861624 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.861643 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.861655 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.964030 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.964073 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.964084 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.964101 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:33 crc kubenswrapper[4717]: I1002 14:21:33.964111 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:33Z","lastTransitionTime":"2025-10-02T14:21:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.065876 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.065918 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.065948 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.065965 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.065975 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.167549 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.167581 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.167590 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.167604 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.167614 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.271073 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.271112 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.271122 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.271136 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.271148 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.373140 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.373173 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.373186 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.373201 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.373213 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.476223 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.476297 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.476306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.476323 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.476366 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.579309 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.579358 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.579371 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.579393 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.579407 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.682061 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.682110 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.682125 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.682146 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.682163 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.784704 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.784770 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.784788 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.784820 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.784860 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.887624 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.887687 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.887699 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.887720 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.887736 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.991346 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.991421 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.991433 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.991451 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:34 crc kubenswrapper[4717]: I1002 14:21:34.991463 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:34Z","lastTransitionTime":"2025-10-02T14:21:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.094511 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.094559 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.094570 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.094586 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.094596 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.197234 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.197319 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.197340 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.197372 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.197399 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.300103 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.300142 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.300150 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.300163 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.300172 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.402363 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.402625 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.402724 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.402794 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.402913 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.505863 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.505912 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.505956 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.505972 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.505981 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.608673 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.608715 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.608727 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.608743 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.608753 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.712547 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.713547 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.713724 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.713919 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.714140 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.817573 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.817625 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.817637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.817656 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.817668 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.838254 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.838285 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.839149 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:35 crc kubenswrapper[4717]: E1002 14:21:35.839356 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.839374 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:35 crc kubenswrapper[4717]: E1002 14:21:35.839458 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:35 crc kubenswrapper[4717]: E1002 14:21:35.839484 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:35 crc kubenswrapper[4717]: E1002 14:21:35.839547 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.919725 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.919782 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.919799 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.919862 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:35 crc kubenswrapper[4717]: I1002 14:21:35.919880 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:35Z","lastTransitionTime":"2025-10-02T14:21:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.022411 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.022460 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.022469 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.022486 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.022497 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.124985 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.125026 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.125034 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.125048 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.125059 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.227570 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.227597 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.227606 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.227618 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.227627 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.329991 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.330066 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.330073 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.330085 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.330094 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.432901 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.432964 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.432974 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.432989 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.432999 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.535803 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.535848 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.535857 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.535873 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.535882 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.638391 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.638426 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.638436 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.638450 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.638459 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.741478 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.741527 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.741538 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.741555 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.741567 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.844630 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.844695 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.844712 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.844735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.844758 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.948908 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.949006 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.949028 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.949055 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:36 crc kubenswrapper[4717]: I1002 14:21:36.949073 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:36Z","lastTransitionTime":"2025-10-02T14:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.051574 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.051610 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.051619 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.051634 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.051643 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.154356 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.154428 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.154446 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.154475 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.154493 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.257045 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.257105 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.257118 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.257142 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.257157 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.359834 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.359902 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.359980 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.360007 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.360019 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.463089 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.463152 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.463174 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.463204 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.463229 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.566186 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.566276 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.566294 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.566317 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.566335 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.669178 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.669231 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.669247 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.669269 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.669287 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.771528 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.771575 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.771588 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.771605 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.771617 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.838120 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:37 crc kubenswrapper[4717]: E1002 14:21:37.838312 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.838837 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.838973 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 14:21:37 crc kubenswrapper[4717]: E1002 14:21:37.838972 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.839023 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 14:21:37 crc kubenswrapper[4717]: E1002 14:21:37.839111 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 14:21:37 crc kubenswrapper[4717]: E1002 14:21:37.839300 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.874924 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.874978 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.875004 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.875018 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.875027 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.979090 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.979160 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.979177 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.979204 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:37 crc kubenswrapper[4717]: I1002 14:21:37.979222 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:37Z","lastTransitionTime":"2025-10-02T14:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.082025 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.082055 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.082064 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.082076 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.082085 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.185623 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.185672 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.185687 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.185703 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.185715 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.288601 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.288678 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.288702 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.288728 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.288752 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.391305 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.391339 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.391350 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.391367 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.391378 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.493515 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.493572 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.493584 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.493601 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.493617 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.597024 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.597126 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.597145 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.597167 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.597184 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.700115 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.700158 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.700168 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.700183 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.700197 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.802899 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.802958 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.802975 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.802989 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.802998 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.905484 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.905514 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.905522 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.905534 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:38 crc kubenswrapper[4717]: I1002 14:21:38.905544 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:38Z","lastTransitionTime":"2025-10-02T14:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.008168 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.008238 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.008255 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.008280 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.008297 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.110963 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.111002 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.111014 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.111048 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.111056 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.214642 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.214702 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.214717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.214735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.214753 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.317224 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.317251 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.317259 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.317272 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.317280 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.419159 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.419202 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.419210 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.419225 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.419234 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.521469 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.521504 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.521512 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.521525 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.521533 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.624075 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.624102 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.624111 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.624125 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.624136 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.726281 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.726325 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.726335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.726349 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.726359 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.829113 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.829156 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.829168 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.829185 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.829200 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.838354 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt"
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.838381 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.838370 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.838443 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 14:21:39 crc kubenswrapper[4717]: E1002 14:21:39.838558 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 02 14:21:39 crc kubenswrapper[4717]: E1002 14:21:39.838830 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 02 14:21:39 crc kubenswrapper[4717]: E1002 14:21:39.838897 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.839094 4717 scope.go:117] "RemoveContainer" containerID="d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2" Oct 02 14:21:39 crc kubenswrapper[4717]: E1002 14:21:39.839079 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.861341 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.876992 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.896546 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.914503 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.927370 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.931421 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.931466 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.931478 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.931497 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.931510 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:39Z","lastTransitionTime":"2025-10-02T14:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.940031 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.955597 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.973743 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:39 crc kubenswrapper[4717]: I1002 14:21:39.987073 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:39Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.008055 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.022594 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.034247 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.034280 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.034292 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.034311 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.034322 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.040770 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.062595 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369
630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"
kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.076395 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\
\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.088451 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.098824 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.137599 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.137646 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.137656 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.137674 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.137685 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.158523 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/1.log" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.161133 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.161303 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.179292 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.194604 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.210528 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.220742 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 
14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.231556 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.241389 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.241424 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.241435 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.241451 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.241466 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.248813 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.264073 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.277945 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.297338 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.310240 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.326642 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.338707 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.346384 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.346461 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.346526 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.346539 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.346570 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.346584 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.361958 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.375735 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.390654 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.404100 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.450330 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.450409 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.450418 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.450441 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.450454 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.553235 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.553278 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.553290 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.553306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.553315 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.655698 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.655726 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.655734 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.655747 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.655756 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.758264 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.758307 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.758316 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.758331 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.758342 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.850603 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"20
25-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.860425 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.860473 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.860490 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.860529 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.860539 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.865775 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.870245 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.878075 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.882613 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.896983 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP
\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.915143 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volume
Mounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\
\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port 
Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled
\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.926030 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.938860 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.954663 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.962870 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.962919 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.962950 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.962967 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.962978 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:40Z","lastTransitionTime":"2025-10-02T14:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.964704 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.975596 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.985214 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:40 crc kubenswrapper[4717]: I1002 14:21:40.995106 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:40Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.012083 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.022590 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.034028 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06
e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.050217 4717 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.063627 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 
14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.065448 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.065482 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.065495 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.065512 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.065526 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.076491 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 
14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.087967 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.101964 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.111842 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.122712 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.134355 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.151353 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100
ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.165566 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.167289 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/2.log" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.167857 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.168032 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/1.log" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.168141 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.168297 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.168438 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.168541 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.171102 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03" exitCode=1 Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.171146 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.171207 4717 scope.go:117] "RemoveContainer" containerID="d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.172548 4717 scope.go:117] "RemoveContainer" containerID="b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03" Oct 02 14:21:41 crc kubenswrapper[4717]: E1002 14:21:41.172870 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.182300 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a
9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.196390 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.208691 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.219921 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.232862 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.244845 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.258784 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.271488 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.271531 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.271543 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.271560 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.271572 4717 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.274635 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.287680 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.299563 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.310844 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.326436 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.340031 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.355886 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.372179 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.373597 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.373652 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.373665 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.373684 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.373699 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.384818 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.400170 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.414016 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.425257 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.436661 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.448363 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.463240 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.475579 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.475618 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.475628 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.475643 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.475653 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.482358 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1a5dd4abe2d9705c6e78b827d5439f26066a86d3375dfc35c3394e301f02fc2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:27Z\\\",\\\"message\\\":\\\" *v1.Pod openshift-machine-config-operator/machine-config-daemon-sk55f after 0 failed attempt(s)\\\\nI1002 14:21:26.759799 6131 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-sk55f\\\\nI1002 14:21:26.759606 6131 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1002 14:21:26.759813 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf after 0 failed attempt(s)\\\\nI1002 14:21:26.759818 6131 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1002 14:21:26.759575 6131 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-5ljkq in node crc\\\\nI1002 14:21:26.759829 6131 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-5ljkq after 0 failed attempt(s)\\\\nI1002 14:21:26.759834 6131 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-5ljkq\\\\nI1002 14:21:26.759824 6131 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
requested-chass\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"message\\\":\\\"tplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1002 14:21:40.865655 6342 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1002 14:21:40.865669 6342 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1002 14:21:40.865730 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.492198 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.502491 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:41Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.545975 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:41 crc kubenswrapper[4717]: E1002 14:21:41.546149 4717 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:41 crc kubenswrapper[4717]: E1002 14:21:41.546215 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs podName:36c8fcb0-a074-461c-a5d1-c01106ee4997 nodeName:}" failed. No retries permitted until 2025-10-02 14:21:57.546198195 +0000 UTC m=+68.398052641 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs") pod "network-metrics-daemon-7v6wt" (UID: "36c8fcb0-a074-461c-a5d1-c01106ee4997") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.577812 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.577870 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.577882 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.577898 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.577909 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.680385 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.680432 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.680449 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.680463 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.680472 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.783115 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.783162 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.783174 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.783192 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.783203 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.837791 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.837818 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.837864 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:41 crc kubenswrapper[4717]: E1002 14:21:41.837907 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:41 crc kubenswrapper[4717]: E1002 14:21:41.838089 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.837879 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:41 crc kubenswrapper[4717]: E1002 14:21:41.838226 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:41 crc kubenswrapper[4717]: E1002 14:21:41.838283 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.885386 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.885451 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.885463 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.885482 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.885495 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.988039 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.988105 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.988130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.988159 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:41 crc kubenswrapper[4717]: I1002 14:21:41.988180 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:41Z","lastTransitionTime":"2025-10-02T14:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.090811 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.090852 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.090860 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.090874 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.090886 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.176176 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/2.log" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.179886 4717 scope.go:117] "RemoveContainer" containerID="b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03" Oct 02 14:21:42 crc kubenswrapper[4717]: E1002 14:21:42.180113 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.192323 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.193573 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.193607 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.193618 4717 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.193633 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.193644 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.205909 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.220112 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api
-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.235190 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd
0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.245214 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.255918 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.266674 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.276535 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.288267 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.295769 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.295811 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.295820 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.295832 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.295840 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.304341 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc
44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"message\\\":\\\"tplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1002 14:21:40.865655 6342 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1002 14:21:40.865669 6342 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1002 14:21:40.865730 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.315918 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.325597 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.338517 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.352199 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.362184 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.372780 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.382968 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.398661 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.398704 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.398713 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.398728 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.398740 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.442384 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.442413 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.442423 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.442436 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.442444 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: E1002 14:21:42.454323 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.457862 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.457888 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.457896 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.457910 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.457919 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: E1002 14:21:42.472314 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.475608 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.475635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.475654 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.475668 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.475677 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: E1002 14:21:42.486527 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.491275 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.491463 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.491565 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.491643 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.491711 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: E1002 14:21:42.505085 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.510631 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.510688 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.510706 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.510726 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.510740 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: E1002 14:21:42.522254 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:42Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:42 crc kubenswrapper[4717]: E1002 14:21:42.522407 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.524500 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.524529 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.524543 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.524650 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.524663 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.626454 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.626494 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.626507 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.626525 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.626537 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.728577 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.728607 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.728617 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.728630 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.728640 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.831298 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.831347 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.831364 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.831385 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.831400 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.933429 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.933476 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.933488 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.933506 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:42 crc kubenswrapper[4717]: I1002 14:21:42.933526 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:42Z","lastTransitionTime":"2025-10-02T14:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.036196 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.036243 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.036252 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.036267 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.036276 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.139185 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.139226 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.139234 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.139250 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.139261 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.241850 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.241897 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.241905 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.241922 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.241951 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.344721 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.344759 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.344768 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.344781 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.344791 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.446883 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.446916 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.446929 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.446970 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.446981 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.549360 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.549446 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.549459 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.549471 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.549482 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.651636 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.651679 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.651691 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.651707 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.651719 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.669113 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.669206 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.669240 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669286 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:22:15.669267207 +0000 UTC m=+86.521121653 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669329 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669343 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669343 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669368 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669379 4717 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669413 4717 secret.go:188] Couldn't 
get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669417 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 14:22:15.66940561 +0000 UTC m=+86.521260056 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.669340 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669442 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:22:15.669435981 +0000 UTC m=+86.521290427 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669357 4717 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.669479 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669492 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 14:22:15.669484372 +0000 UTC m=+86.521338928 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669537 4717 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.669582 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:22:15.669572375 +0000 UTC m=+86.521426821 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.753629 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.753665 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.753674 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.753686 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.753695 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.838052 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.838069 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.838107 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.838084 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.838187 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.838288 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.838370 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:43 crc kubenswrapper[4717]: E1002 14:21:43.838437 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.855996 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.856036 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.856050 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.856069 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.856081 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.958632 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.958666 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.958674 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.958687 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:43 crc kubenswrapper[4717]: I1002 14:21:43.958697 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:43Z","lastTransitionTime":"2025-10-02T14:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.061345 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.061393 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.061410 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.061483 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.061502 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.163818 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.163866 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.163878 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.163894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.163905 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.266028 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.266071 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.266083 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.266098 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.266110 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.368189 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.368222 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.368232 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.368262 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.368272 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.471402 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.471434 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.471443 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.471454 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.471463 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.573200 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.573233 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.573241 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.573253 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.573261 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.675039 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.675087 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.675097 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.675113 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.675126 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.777623 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.777660 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.777668 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.777681 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.777689 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.879709 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.879751 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.879762 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.879778 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.879788 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.982011 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.982046 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.982054 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.982068 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:44 crc kubenswrapper[4717]: I1002 14:21:44.982078 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:44Z","lastTransitionTime":"2025-10-02T14:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.083662 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.083699 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.083710 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.083724 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.083734 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.185333 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.185362 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.185643 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.185671 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.185681 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.288306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.288418 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.288427 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.288439 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.288466 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.391085 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.391124 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.391131 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.391145 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.391155 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.493696 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.493743 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.493753 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.493770 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.493781 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.596596 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.596628 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.596636 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.596654 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.596666 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.699458 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.699512 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.699526 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.699544 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.699570 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.802254 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.802299 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.802315 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.802337 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.802358 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.838675 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.838740 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.838740 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:45 crc kubenswrapper[4717]: E1002 14:21:45.838819 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:45 crc kubenswrapper[4717]: E1002 14:21:45.838987 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:45 crc kubenswrapper[4717]: E1002 14:21:45.839029 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.839064 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:45 crc kubenswrapper[4717]: E1002 14:21:45.839208 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.904660 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.904700 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.904708 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.904722 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:45 crc kubenswrapper[4717]: I1002 14:21:45.904733 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:45Z","lastTransitionTime":"2025-10-02T14:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.008269 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.008311 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.008321 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.008335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.008345 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.110270 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.110320 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.110339 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.110362 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.110378 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.212171 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.212206 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.212215 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.212229 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.212238 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.314297 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.314332 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.314341 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.314352 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.314362 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.416709 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.416758 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.416770 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.416788 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.416803 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.519287 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.519347 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.519360 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.519379 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.519395 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.622067 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.622147 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.622162 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.622180 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.622192 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.724945 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.724977 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.724986 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.724998 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.725008 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.827667 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.827722 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.827770 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.827783 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.827792 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.930517 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.930563 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.930571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.930589 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:46 crc kubenswrapper[4717]: I1002 14:21:46.930598 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:46Z","lastTransitionTime":"2025-10-02T14:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.033028 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.033087 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.033100 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.033120 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.033133 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.135696 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.135735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.135747 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.135762 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.135774 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.238166 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.238414 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.238452 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.238474 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.238489 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.341231 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.341306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.341315 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.341331 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.341342 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.443796 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.443837 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.443850 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.443864 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.443873 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.548347 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.548403 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.548416 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.548434 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.548450 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.651161 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.651218 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.651230 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.651250 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.651267 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.754644 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.754730 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.754754 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.754788 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.754810 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.838814 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.838861 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.839006 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.839057 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:47 crc kubenswrapper[4717]: E1002 14:21:47.839105 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:47 crc kubenswrapper[4717]: E1002 14:21:47.839249 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:47 crc kubenswrapper[4717]: E1002 14:21:47.839313 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:47 crc kubenswrapper[4717]: E1002 14:21:47.839374 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.857586 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.857642 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.857665 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.857689 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.857707 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.960923 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.960977 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.960987 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.961003 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:47 crc kubenswrapper[4717]: I1002 14:21:47.961016 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:47Z","lastTransitionTime":"2025-10-02T14:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.064278 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.064352 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.064371 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.064401 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.064420 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.167128 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.167216 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.167241 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.167273 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.167296 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.270357 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.270444 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.270459 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.270488 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.270502 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.373687 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.373733 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.373743 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.373760 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.373770 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.476474 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.476552 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.476572 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.476605 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.476625 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.579046 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.579437 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.579572 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.579696 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.579881 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.683578 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.683636 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.683655 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.683672 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.683683 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.787762 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.787899 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.787921 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.788088 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.788129 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.890892 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.890980 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.891000 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.891023 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.891043 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.995091 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.995155 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.995176 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.995204 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:48 crc kubenswrapper[4717]: I1002 14:21:48.995226 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:48Z","lastTransitionTime":"2025-10-02T14:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.099779 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.099894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.099913 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.100043 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.100108 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.203330 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.203389 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.203405 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.203428 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.203444 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.306833 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.307008 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.307029 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.307114 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.307197 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.411599 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.411663 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.411681 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.411708 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.411729 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.514797 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.514864 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.514878 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.514899 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.514910 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.618224 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.618277 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.618296 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.618326 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.618347 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.720843 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.720902 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.720915 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.720929 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.720953 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.824016 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.824414 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.824511 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.824607 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.824679 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.838024 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:49 crc kubenswrapper[4717]: E1002 14:21:49.838159 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.838329 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:49 crc kubenswrapper[4717]: E1002 14:21:49.838377 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.838511 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:49 crc kubenswrapper[4717]: E1002 14:21:49.838621 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.838836 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:49 crc kubenswrapper[4717]: E1002 14:21:49.839557 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.929482 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.929537 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.929548 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.929569 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:49 crc kubenswrapper[4717]: I1002 14:21:49.929583 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:49Z","lastTransitionTime":"2025-10-02T14:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.032013 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.032073 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.032086 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.032106 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.032119 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.135376 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.135432 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.135441 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.135459 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.135469 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.239899 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.240181 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.240222 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.240242 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.240255 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.344757 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.345056 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.345149 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.345231 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.345310 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.451753 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.452176 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.452300 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.452421 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.452639 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.556099 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.556186 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.556213 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.556246 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.556272 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.659232 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.659311 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.659335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.659389 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.659415 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.762041 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.762125 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.762144 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.762172 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.762192 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.853464 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.866152 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.866188 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.866198 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.866211 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.866224 4717 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.869045 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.882907 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.893770 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.907853 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.920356 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.936355 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.955170 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.969148 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.969212 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:50 crc 
kubenswrapper[4717]: I1002 14:21:50.969222 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.969239 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.969266 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:50Z","lastTransitionTime":"2025-10-02T14:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.972376 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:2
1:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:50 crc kubenswrapper[4717]: I1002 14:21:50.992555 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"star
ted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:50Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.004710 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:51Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.015438 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:51Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.033070 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:51Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.048705 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:51Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.061647 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:51Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.072170 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.072217 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.072228 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.072243 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.072254 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.080502 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc
44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"message\\\":\\\"tplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1002 14:21:40.865655 6342 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1002 14:21:40.865669 6342 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1002 14:21:40.865730 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:51Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.094757 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:51Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.175637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.175695 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.175704 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.175720 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.175729 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.278252 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.278300 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.278311 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.278325 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.278334 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.381031 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.381649 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.381676 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.381695 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.381707 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.484032 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.484088 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.484097 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.484112 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.484121 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.586306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.586371 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.586380 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.586395 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.586404 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.688666 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.688704 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.688713 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.688725 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.688734 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.791501 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.791543 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.791555 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.791568 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.791579 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.838148 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.838194 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.838195 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.838175 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:51 crc kubenswrapper[4717]: E1002 14:21:51.838339 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:51 crc kubenswrapper[4717]: E1002 14:21:51.838270 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:51 crc kubenswrapper[4717]: E1002 14:21:51.838446 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:51 crc kubenswrapper[4717]: E1002 14:21:51.838515 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.894124 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.894442 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.894524 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.894608 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.894686 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.996514 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.996774 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.996906 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.997032 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:51 crc kubenswrapper[4717]: I1002 14:21:51.997140 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:51Z","lastTransitionTime":"2025-10-02T14:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.099994 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.100038 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.100048 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.100065 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.100074 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.203543 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.203596 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.203605 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.203623 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.203634 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.307985 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.308066 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.308082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.308109 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.308127 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.411207 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.411264 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.411278 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.411300 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.411312 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.513635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.513670 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.513678 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.513692 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.513701 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.616892 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.616988 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.616999 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.617019 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.617032 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.623321 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.623409 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.623429 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.623460 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.623480 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: E1002 14:21:52.636478 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:52Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.641008 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.641066 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.641084 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.641105 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.641119 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: E1002 14:21:52.656809 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:52Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.661319 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.661372 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.661384 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.661404 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.661421 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: E1002 14:21:52.674320 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:52Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.678593 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.678646 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.678662 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.678683 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.678698 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: E1002 14:21:52.692769 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:52Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.696628 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.696651 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.696659 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.696674 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.696683 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: E1002 14:21:52.711374 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:21:52Z is after 2025-08-24T17:21:41Z" Oct 02 14:21:52 crc kubenswrapper[4717]: E1002 14:21:52.711516 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.719390 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.719459 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.719473 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.719495 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.719512 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.823509 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.823557 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.823568 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.823583 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.823593 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.976072 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.976130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.976139 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.976158 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:52 crc kubenswrapper[4717]: I1002 14:21:52.976174 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:52Z","lastTransitionTime":"2025-10-02T14:21:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.078786 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.078874 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.078899 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.078961 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.078987 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.182555 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.182608 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.182619 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.182638 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.182649 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.286079 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.286130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.286141 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.286157 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.286167 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.389500 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.389560 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.389571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.389590 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.389603 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.493393 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.493478 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.493502 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.493534 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.493552 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.596460 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.596522 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.596535 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.596559 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.596572 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.699510 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.699591 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.699609 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.699640 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.699658 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.802872 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.803006 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.803036 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.803064 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.803081 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.838760 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.838922 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.838876 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.838964 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:53 crc kubenswrapper[4717]: E1002 14:21:53.839067 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:53 crc kubenswrapper[4717]: E1002 14:21:53.839197 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:53 crc kubenswrapper[4717]: E1002 14:21:53.839404 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:53 crc kubenswrapper[4717]: E1002 14:21:53.839620 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.906550 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.906608 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.906622 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.906641 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:53 crc kubenswrapper[4717]: I1002 14:21:53.906656 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:53Z","lastTransitionTime":"2025-10-02T14:21:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.009702 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.009758 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.009768 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.009789 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.009802 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.113459 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.113533 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.113553 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.113582 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.113602 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.216859 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.216907 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.216918 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.216953 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.216965 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.319758 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.319799 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.319810 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.319825 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.319838 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.423319 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.423368 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.423415 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.423435 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.423447 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.526797 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.526840 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.526852 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.526869 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.526881 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.630764 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.630817 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.630833 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.630855 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.630869 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.733171 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.733229 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.733240 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.733254 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.733266 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.837238 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.837283 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.837294 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.837311 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.837323 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.839288 4717 scope.go:117] "RemoveContainer" containerID="b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03" Oct 02 14:21:54 crc kubenswrapper[4717]: E1002 14:21:54.839584 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.939827 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.939868 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.939876 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.939891 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:54 crc kubenswrapper[4717]: I1002 14:21:54.939901 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:54Z","lastTransitionTime":"2025-10-02T14:21:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.042335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.042373 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.042382 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.042398 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.042411 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.144757 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.144789 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.144801 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.144817 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.144828 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.246666 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.246699 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.246710 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.246725 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.246735 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.349158 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.349209 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.349401 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.349418 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.349431 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.451208 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.451254 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.451282 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.451295 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.451304 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.553347 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.553391 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.553406 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.553421 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.553431 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.655796 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.655867 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.655880 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.655893 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.655907 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.759138 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.759187 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.759199 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.759220 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.759235 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.838558 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.838610 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.838631 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.838651 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:55 crc kubenswrapper[4717]: E1002 14:21:55.838716 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:55 crc kubenswrapper[4717]: E1002 14:21:55.838823 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:55 crc kubenswrapper[4717]: E1002 14:21:55.838876 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:55 crc kubenswrapper[4717]: E1002 14:21:55.838988 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.861146 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.861211 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.861221 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.861238 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.861247 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.963236 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.963269 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.963278 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.963293 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:55 crc kubenswrapper[4717]: I1002 14:21:55.963303 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:55Z","lastTransitionTime":"2025-10-02T14:21:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.065972 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.066025 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.066036 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.066055 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.066066 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.169572 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.169616 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.169628 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.169647 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.169659 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.272693 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.272748 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.272761 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.272779 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.272791 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.375769 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.375818 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.375829 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.375844 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.375855 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.478189 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.478257 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.478273 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.478302 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.478320 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.581052 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.581154 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.581178 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.581205 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.581226 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.683582 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.683620 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.683631 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.683646 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.683657 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.785801 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.785864 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.785877 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.785892 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.785901 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.853817 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.888700 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.888746 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.888756 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.888774 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.888785 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.991733 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.991771 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.991780 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.991795 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:56 crc kubenswrapper[4717]: I1002 14:21:56.991804 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:56Z","lastTransitionTime":"2025-10-02T14:21:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.094796 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.094854 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.094863 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.094880 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.094891 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.198166 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.198250 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.198277 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.198309 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.198329 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.300473 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.300542 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.300580 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.300609 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.300632 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.404037 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.404096 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.404105 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.404120 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.404129 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.506872 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.506917 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.506944 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.506966 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.506977 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.608916 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.609095 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.609113 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.609146 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.609166 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.620494 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:57 crc kubenswrapper[4717]: E1002 14:21:57.620714 4717 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:57 crc kubenswrapper[4717]: E1002 14:21:57.620829 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs podName:36c8fcb0-a074-461c-a5d1-c01106ee4997 nodeName:}" failed. No retries permitted until 2025-10-02 14:22:29.620794134 +0000 UTC m=+100.472648610 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs") pod "network-metrics-daemon-7v6wt" (UID: "36c8fcb0-a074-461c-a5d1-c01106ee4997") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.712845 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.713028 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.713052 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.713083 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.713108 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.817990 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.818045 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.818056 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.818080 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.818094 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.838668 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.838684 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.838696 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.838663 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:57 crc kubenswrapper[4717]: E1002 14:21:57.838846 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:57 crc kubenswrapper[4717]: E1002 14:21:57.838878 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:57 crc kubenswrapper[4717]: E1002 14:21:57.838979 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:57 crc kubenswrapper[4717]: E1002 14:21:57.839096 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.920542 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.920616 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.920631 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.920650 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:57 crc kubenswrapper[4717]: I1002 14:21:57.920687 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:57Z","lastTransitionTime":"2025-10-02T14:21:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.024038 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.024082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.024095 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.024111 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.024123 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.127394 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.127456 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.127466 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.127482 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.127492 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.229197 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.229252 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.229264 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.229281 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.229293 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.331563 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.331610 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.331623 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.331637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.331648 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.438914 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.438972 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.438983 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.438999 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.439009 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.540879 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.540927 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.540956 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.540974 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.540987 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.643735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.643771 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.643780 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.643797 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.643808 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.745786 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.745815 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.745823 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.745834 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.745842 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.848016 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.848078 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.848087 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.848098 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.848107 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.950466 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.950510 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.950522 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.950537 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:58 crc kubenswrapper[4717]: I1002 14:21:58.950546 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:58Z","lastTransitionTime":"2025-10-02T14:21:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.053331 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.053367 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.053378 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.053394 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.053405 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.155616 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.155657 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.155671 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.155689 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.155709 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.257843 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.257879 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.257891 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.257906 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.257918 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.360020 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.360052 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.360061 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.360072 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.360082 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.462041 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.462083 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.462094 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.462108 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.462119 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.564761 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.564799 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.564811 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.564831 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.564842 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.666852 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.666877 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.666885 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.666898 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.666906 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.789887 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.790235 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.790246 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.790259 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.790268 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.838471 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.838545 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.838583 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.838553 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:21:59 crc kubenswrapper[4717]: E1002 14:21:59.838648 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:21:59 crc kubenswrapper[4717]: E1002 14:21:59.838760 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:21:59 crc kubenswrapper[4717]: E1002 14:21:59.838851 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:21:59 crc kubenswrapper[4717]: E1002 14:21:59.839029 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.892955 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.893015 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.893029 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.893053 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.893068 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.995279 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.995313 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.995322 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.995335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:21:59 crc kubenswrapper[4717]: I1002 14:21:59.995345 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:21:59Z","lastTransitionTime":"2025-10-02T14:21:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.098476 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.098508 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.098518 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.098533 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.098544 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.201468 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.201513 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.201529 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.201571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.201589 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.304691 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.304768 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.304782 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.304826 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.304839 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.407026 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.407527 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.407594 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.407656 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.407721 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.510040 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.510099 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.510110 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.510124 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.510161 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.612292 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.612344 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.612355 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.612372 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.612385 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.714720 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.714784 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.714794 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.714810 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.714821 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.816710 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.817079 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.817256 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.817388 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.817511 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.852642 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.865828 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.879550 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.891354 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.906068 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.918890 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.920005 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.920064 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.920075 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.920089 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.920099 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:00Z","lastTransitionTime":"2025-10-02T14:22:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.931452 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.945193 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369
630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"
kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.956816 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\
\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.970230 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.980780 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:00 crc kubenswrapper[4717]: I1002 14:22:00.992172 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:00Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.004042 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.015946 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.022085 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.022129 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.022139 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.022153 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.022163 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.029097 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubel
et\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.046833 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc
44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"message\\\":\\\"tplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1002 14:21:40.865655 6342 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1002 14:21:40.865669 6342 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1002 14:21:40.865730 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.072221 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.095275 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.124050 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.124096 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.124106 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.124125 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.124137 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.226716 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.226763 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.226771 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.226792 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.226811 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.242499 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/0.log" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.242572 4717 generic.go:334] "Generic (PLEG): container finished" podID="424c679b-8db0-4ba4-9c8f-67a65fe38048" containerID="0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893" exitCode=1 Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.242612 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s7n7q" event={"ID":"424c679b-8db0-4ba4-9c8f-67a65fe38048","Type":"ContainerDied","Data":"0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.243131 4717 scope.go:117] "RemoveContainer" containerID="0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.268181 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.285533 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.299481 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:00Z\\\",\\\"message\\\":\\\"2025-10-02T14:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa\\\\n2025-10-02T14:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa to /host/opt/cni/bin/\\\\n2025-10-02T14:21:15Z [verbose] multus-daemon started\\\\n2025-10-02T14:21:15Z [verbose] Readiness Indicator file check\\\\n2025-10-02T14:22:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.318327 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"message\\\":\\\"tplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1002 14:21:40.865655 6342 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1002 14:21:40.865669 6342 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1002 14:21:40.865730 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.328559 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.329757 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.329785 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.329794 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.329807 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.329816 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.338541 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.348674 4717 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.360636 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.371442 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.380360 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.391199 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.403261 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.416876 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.430331 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.432046 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.432075 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.432086 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.432102 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.432113 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.440121 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.452713 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.465377 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.475139 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:01Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.534355 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.534390 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.534401 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.534416 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.534426 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.636698 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.636734 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.636747 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.636766 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.636778 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.739729 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.739757 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.739765 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.739778 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.739788 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.838437 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.838478 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:01 crc kubenswrapper[4717]: E1002 14:22:01.838743 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.838437 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.838811 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:01 crc kubenswrapper[4717]: E1002 14:22:01.838847 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:01 crc kubenswrapper[4717]: E1002 14:22:01.838888 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:01 crc kubenswrapper[4717]: E1002 14:22:01.838950 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.841486 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.841508 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.841515 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.841526 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.841535 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.961610 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.961648 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.961668 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.961691 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:01 crc kubenswrapper[4717]: I1002 14:22:01.961702 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:01Z","lastTransitionTime":"2025-10-02T14:22:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.063705 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.063759 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.063769 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.063783 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.063792 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.166065 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.166110 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.166120 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.166135 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.166145 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.251517 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/0.log" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.251583 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s7n7q" event={"ID":"424c679b-8db0-4ba4-9c8f-67a65fe38048","Type":"ContainerStarted","Data":"65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.268186 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.268222 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.268230 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.268245 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.268254 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.272601 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc
44906fcfa449d78eb5c79f03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"message\\\":\\\"tplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1002 14:21:40.865655 6342 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1002 14:21:40.865669 6342 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1002 14:21:40.865730 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.282555 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.292431 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.303886 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.316262 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.331483 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.343435 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:00Z\\\",\\\"message\\\":\\\"2025-10-02T14:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa\\\\n2025-10-02T14:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa to /host/opt/cni/bin/\\\\n2025-10-02T14:21:15Z [verbose] multus-daemon started\\\\n2025-10-02T14:21:15Z [verbose] Readiness Indicator file check\\\\n2025-10-02T14:22:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:22:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.354843 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.364497 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.370445 4717 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.370504 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.370516 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.370533 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.370544 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.377335 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.389119 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.401983 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.411579 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.424858 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.437157 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.453755 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.464103 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 
14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.472887 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.472956 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.472969 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.472988 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.473000 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.473329 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:02Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.575084 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.575132 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.575143 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.575158 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.575169 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.678469 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.678539 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.678551 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.678568 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.678580 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.781449 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.781493 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.781502 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.781517 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.781545 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.883635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.883676 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.883684 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.883700 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.883710 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.985713 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.985763 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.985776 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.985799 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:02 crc kubenswrapper[4717]: I1002 14:22:02.985812 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:02Z","lastTransitionTime":"2025-10-02T14:22:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.031292 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.031338 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.031348 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.031369 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.031380 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.042672 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:03Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.045996 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.046027 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.046037 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.046051 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.046062 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.057138 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:03Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.060556 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.060598 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.060608 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.060626 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.060637 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.071745 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:03Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.074374 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.074397 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.074405 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.074418 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.074430 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.086633 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:03Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.090218 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.090253 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.090262 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.090278 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.090288 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.102432 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:03Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.102608 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.104672 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.104714 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.104724 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.104742 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.104754 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.207492 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.207530 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.207539 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.207555 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.207565 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.309815 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.309852 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.309861 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.309875 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.309885 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.412255 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.412411 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.412433 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.412466 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.412489 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.514589 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.514625 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.514635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.514650 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.514660 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.616388 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.616427 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.616437 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.616452 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.616462 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.718854 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.718895 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.718903 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.718916 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.718925 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.821282 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.821317 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.821326 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.821341 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.821350 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.838763 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.838856 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.838912 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.838962 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.839034 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.838971 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.839191 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:03 crc kubenswrapper[4717]: E1002 14:22:03.839099 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.923988 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.924018 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.924026 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.924037 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:03 crc kubenswrapper[4717]: I1002 14:22:03.924046 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:03Z","lastTransitionTime":"2025-10-02T14:22:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.027142 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.027190 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.027200 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.027216 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.027230 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.129454 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.129495 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.129503 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.129516 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.129530 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.231631 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.231670 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.231681 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.231698 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.231709 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.335014 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.335055 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.335063 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.335078 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.335090 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.437785 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.437851 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.437860 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.437874 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.437883 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.540535 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.540597 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.540615 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.540640 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.540658 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.642679 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.642750 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.642764 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.642780 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.642791 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.746583 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.746637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.746658 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.746673 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.746684 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.848978 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.849056 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.849070 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.849087 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.849102 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.951376 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.951418 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.951439 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.951453 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:04 crc kubenswrapper[4717]: I1002 14:22:04.951465 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:04Z","lastTransitionTime":"2025-10-02T14:22:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.054223 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.054267 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.054275 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.054288 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.054297 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.157312 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.157355 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.157366 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.157385 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.157394 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.260252 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.260332 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.260344 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.260361 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.260372 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.362169 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.362209 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.362218 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.362232 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.362241 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.465055 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.465105 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.465118 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.465139 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.465152 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.568398 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.568439 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.568448 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.568464 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.568474 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.671621 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.671659 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.671667 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.671679 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.671688 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.774125 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.774154 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.774161 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.774175 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.774184 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.838180 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.838262 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.838180 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.838199 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:05 crc kubenswrapper[4717]: E1002 14:22:05.838402 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:05 crc kubenswrapper[4717]: E1002 14:22:05.838284 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:05 crc kubenswrapper[4717]: E1002 14:22:05.838605 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:05 crc kubenswrapper[4717]: E1002 14:22:05.838716 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.876370 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.876445 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.876459 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.876475 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.876485 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.978648 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.978687 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.978696 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.978710 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:05 crc kubenswrapper[4717]: I1002 14:22:05.978719 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:05Z","lastTransitionTime":"2025-10-02T14:22:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.080867 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.080965 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.080986 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.081014 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.081036 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.182683 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.182717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.182729 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.182745 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.182755 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.288407 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.288469 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.288488 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.288508 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.288528 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.391425 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.391460 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.392057 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.392080 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.392091 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.494517 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.494554 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.494562 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.494575 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.494585 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.597529 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.597577 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.597587 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.597601 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.597612 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.700010 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.700145 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.700155 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.700168 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.700182 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.802451 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.802498 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.802510 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.802527 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.802541 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.838994 4717 scope.go:117] "RemoveContainer" containerID="b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.904846 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.905181 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.905190 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.905204 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:06 crc kubenswrapper[4717]: I1002 14:22:06.905214 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:06Z","lastTransitionTime":"2025-10-02T14:22:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.007883 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.007918 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.007949 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.007968 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.007980 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.111498 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.111539 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.111550 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.111566 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.111579 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.213574 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.213611 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.213621 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.213637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.213648 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.268299 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/2.log" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.270483 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.270923 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.282896 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8
e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.296046 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.307238 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.315220 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.315269 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.315278 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.315292 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.315301 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.317029 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.328997 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.339164 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.350254 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.365385 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.376250 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 
14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.389217 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.398898 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.409558 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.418388 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.418426 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.418438 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.418455 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.418492 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.420102 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.430720 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.441041 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:00Z\\\",\\\"message\\\":\\\"2025-10-02T14:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa\\\\n2025-10-02T14:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa to /host/opt/cni/bin/\\\\n2025-10-02T14:21:15Z [verbose] multus-daemon started\\\\n2025-10-02T14:21:15Z [verbose] Readiness Indicator file check\\\\n2025-10-02T14:22:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:22:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.455029 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"message\\\":\\\"tplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1002 14:21:40.865655 6342 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1002 14:21:40.865669 6342 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1002 14:21:40.865730 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:22:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.464174 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.475084 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:07Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.520680 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.520725 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.520735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.520749 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.520759 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.623562 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.623617 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.623629 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.623648 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.623663 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.725703 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.725739 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.725749 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.725762 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.725771 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.827502 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.827545 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.827557 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.827574 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.827585 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.837988 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.838034 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.838034 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:07 crc kubenswrapper[4717]: E1002 14:22:07.838119 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:07 crc kubenswrapper[4717]: E1002 14:22:07.838275 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:07 crc kubenswrapper[4717]: E1002 14:22:07.838351 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.838371 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:07 crc kubenswrapper[4717]: E1002 14:22:07.838708 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.930580 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.930629 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.930639 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.930659 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:07 crc kubenswrapper[4717]: I1002 14:22:07.930671 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:07Z","lastTransitionTime":"2025-10-02T14:22:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.033541 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.033611 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.033630 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.033658 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.033678 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.136959 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.137002 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.137011 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.137027 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.137037 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.240111 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.240167 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.240183 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.240202 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.240217 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.276418 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/3.log" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.277157 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/2.log" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.280304 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9" exitCode=1 Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.280361 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.280414 4717 scope.go:117] "RemoveContainer" containerID="b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.281148 4717 scope.go:117] "RemoveContainer" containerID="c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9" Oct 02 14:22:08 crc kubenswrapper[4717]: E1002 14:22:08.281315 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.294272 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.306728 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.317407 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.330335 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.341905 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.344431 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.344463 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.344473 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.344486 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.344495 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.353226 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:00Z\\\",\\\"message\\\":\\\"2025-10-02T14:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa\\\\n2025-10-02T14:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa to /host/opt/cni/bin/\\\\n2025-10-02T14:21:15Z [verbose] multus-daemon started\\\\n2025-10-02T14:21:15Z [verbose] Readiness Indicator file check\\\\n2025-10-02T14:22:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:22:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.369591 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b480d22664950c7d1482a0dc42e2cb21f1d4f3dc44906fcfa449d78eb5c79f03\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"message\\\":\\\"tplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1002 14:21:40.865655 6342 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1002 14:21:40.865669 6342 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-marketplace/certified-operators]} name:Service_openshift-marketplace/certified-operators_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.214:50051:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {20da2226-531c-4179-9810-aa4026995ca3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1002 14:21:40.865730 6342 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:07Z\\\",\\\"message\\\":\\\"y.go:140\\\\nI1002 14:22:07.645723 6700 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:22:07.646580 6700 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:22:07.646632 6700 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1002 14:22:07.646637 6700 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1002 14:22:07.646650 6700 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1002 14:22:07.646655 6700 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1002 14:22:07.646745 6700 factory.go:656] Stopping watch factory\\\\nI1002 14:22:07.646759 6700 ovnkube.go:599] Stopped 
ovnkube\\\\nI1002 14:22:07.646794 6700 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:22:07.646804 6700 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:22:07.646809 6700 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:22:07.646814 6700 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:22:07.646819 6700 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:22:07.646831 6700 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1002 14:22:07.646898 6700 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:22:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-lo
g\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.380759 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.392123 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d24778049efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.405245 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.417015 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.428531 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.440553 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.447260 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.447306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.447318 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.447335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.447348 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.453583 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.469264 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.491927 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.516787 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.536772 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:08Z is after 2025-08-24T17:21:41Z" Oct 02 
14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.550576 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.550675 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.550717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.550755 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.550779 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.653334 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.653407 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.653426 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.653524 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.653541 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.757283 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.757370 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.757384 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.757407 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.757421 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.859842 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.859898 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.859916 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.859980 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.860017 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.963016 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.963083 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.963102 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.963135 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:08 crc kubenswrapper[4717]: I1002 14:22:08.963152 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:08Z","lastTransitionTime":"2025-10-02T14:22:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.066867 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.066949 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.066970 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.066991 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.067011 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.169647 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.169726 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.169741 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.169765 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.169779 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.273756 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.273837 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.273887 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.273918 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.273956 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.286711 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/3.log" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.290043 4717 scope.go:117] "RemoveContainer" containerID="c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9" Oct 02 14:22:09 crc kubenswrapper[4717]: E1002 14:22:09.290203 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.304823 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.318021 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.328971 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.340490 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.350357 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.362078 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.375377 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.376618 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.376671 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.376683 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.376699 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.376710 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.387053 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.402051 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.416864 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.427992 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.444464 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.462820 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.498841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.498896 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.498909 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.498928 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.498961 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.501859 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:00Z\\\",\\\"message\\\":\\\"2025-10-02T14:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa\\\\n2025-10-02T14:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa to /host/opt/cni/bin/\\\\n2025-10-02T14:21:15Z [verbose] multus-daemon started\\\\n2025-10-02T14:21:15Z 
[verbose] Readiness Indicator file check\\\\n2025-10-02T14:22:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:22:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.522393 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:07Z\\\",\\\"message\\\":\\\"y.go:140\\\\nI1002 14:22:07.645723 6700 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:22:07.646580 6700 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:22:07.646632 6700 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1002 14:22:07.646637 6700 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1002 14:22:07.646650 6700 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1002 14:22:07.646655 6700 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1002 14:22:07.646745 6700 factory.go:656] Stopping watch factory\\\\nI1002 14:22:07.646759 6700 ovnkube.go:599] Stopped ovnkube\\\\nI1002 14:22:07.646794 6700 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:22:07.646804 6700 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:22:07.646809 6700 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:22:07.646814 6700 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:22:07.646819 6700 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:22:07.646831 6700 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1002 14:22:07.646898 6700 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:22:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.536621 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.549150 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.562516 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:09Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.602076 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.602127 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.602138 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.602159 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.602173 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.705307 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.705394 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.705422 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.705456 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.705481 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.809761 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.809858 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.809877 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.809910 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.809962 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.838853 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.838974 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:09 crc kubenswrapper[4717]: E1002 14:22:09.839002 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.839045 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:09 crc kubenswrapper[4717]: E1002 14:22:09.839159 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.838968 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:09 crc kubenswrapper[4717]: E1002 14:22:09.839310 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:09 crc kubenswrapper[4717]: E1002 14:22:09.839486 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.914116 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.914194 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.914218 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.914255 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:09 crc kubenswrapper[4717]: I1002 14:22:09.914321 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:09Z","lastTransitionTime":"2025-10-02T14:22:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.018312 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.018376 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.018395 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.018425 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.018447 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.122143 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.122209 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.122220 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.122246 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.122258 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.225123 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.225159 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.225167 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.225182 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.225194 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.328684 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.328720 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.328728 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.328745 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.328755 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.430430 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.430526 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.430544 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.430575 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.430607 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.560602 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.560731 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.560758 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.560794 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.560819 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.662952 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.662988 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.662999 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.663012 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.663020 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.765001 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.765056 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.765068 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.765084 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.765096 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.859878 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.867492 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.867521 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.867542 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.867555 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.867564 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.873803 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.887466 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: 
I1002 14:22:10.897007 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.905080 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.915921 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:00Z\\\",\\\"message\\\":\\\"2025-10-02T14:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa\\\\n2025-10-02T14:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa to /host/opt/cni/bin/\\\\n2025-10-02T14:21:15Z [verbose] multus-daemon started\\\\n2025-10-02T14:21:15Z [verbose] Readiness Indicator file check\\\\n2025-10-02T14:22:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:22:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.933284 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:07Z\\\",\\\"message\\\":\\\"y.go:140\\\\nI1002 14:22:07.645723 6700 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:22:07.646580 6700 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:22:07.646632 6700 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1002 14:22:07.646637 6700 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1002 14:22:07.646650 6700 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1002 14:22:07.646655 6700 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1002 14:22:07.646745 6700 factory.go:656] Stopping watch factory\\\\nI1002 14:22:07.646759 6700 ovnkube.go:599] Stopped ovnkube\\\\nI1002 14:22:07.646794 6700 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:22:07.646804 6700 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:22:07.646809 6700 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:22:07.646814 6700 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:22:07.646819 6700 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:22:07.646831 6700 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1002 14:22:07.646898 6700 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:22:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.944807 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.957528 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.968865 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.969808 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.969913 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.970126 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.970270 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.970565 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:10Z","lastTransitionTime":"2025-10-02T14:22:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.981528 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:10 crc kubenswrapper[4717]: I1002 14:22:10.994667 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:10Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.004853 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:11Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.015442 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:11Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.026484 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:11Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.039382 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:11Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.053864 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:11Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.067071 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:11Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.072594 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.073093 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.073417 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.073637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.073836 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.176703 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.176750 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.176764 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.176783 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.176797 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.278845 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.279097 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.279130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.279157 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.279169 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.381725 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.381780 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.381791 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.381806 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.381816 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.484298 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.484333 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.484341 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.484355 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.484364 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.590545 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.590598 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.590617 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.590640 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.590658 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.692562 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.692613 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.692629 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.692655 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.692672 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.795029 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.795574 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.795671 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.795763 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.795849 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.837865 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:11 crc kubenswrapper[4717]: E1002 14:22:11.838002 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.837865 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.838256 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:11 crc kubenswrapper[4717]: E1002 14:22:11.838375 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.838394 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:11 crc kubenswrapper[4717]: E1002 14:22:11.838556 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:11 crc kubenswrapper[4717]: E1002 14:22:11.838249 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.898236 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.898279 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.898291 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.898307 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:11 crc kubenswrapper[4717]: I1002 14:22:11.898318 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:11Z","lastTransitionTime":"2025-10-02T14:22:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.000788 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.000826 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.000835 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.000851 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.000863 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.103245 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.103295 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.103308 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.103325 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.103336 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.205098 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.205168 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.205194 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.205222 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.205247 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.306807 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.307142 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.307246 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.307366 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.307472 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.409821 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.409860 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.409875 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.409896 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.409911 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.511875 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.511949 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.511966 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.511989 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.512005 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.614861 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.614893 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.614902 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.614916 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.614941 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.718287 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.718382 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.718407 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.718442 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.718503 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.821683 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.821727 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.821737 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.821751 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.821761 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.923910 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.923986 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.923998 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.924010 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:12 crc kubenswrapper[4717]: I1002 14:22:12.924019 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:12Z","lastTransitionTime":"2025-10-02T14:22:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.026510 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.026803 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.026869 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.027017 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.027084 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.129684 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.129952 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.130029 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.130108 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.130177 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.193806 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.193863 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.193875 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.193892 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.193903 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.213215 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.218321 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.218364 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.218375 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.218389 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.218400 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.238191 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.243335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.243494 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.243604 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.243712 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.243811 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.260656 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.264416 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.264461 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.264471 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.264490 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.264500 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.278952 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.283178 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.283209 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.283218 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.283231 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.283239 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.302246 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:13Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.302362 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.303868 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.303995 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.304024 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.304082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.304105 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.407285 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.407330 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.407343 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.407359 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.407371 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.509224 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.509269 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.509282 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.509299 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.509311 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.611590 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.611929 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.612067 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.612169 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.612254 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.715271 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.715590 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.715707 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.715815 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.715913 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.819444 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.819479 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.819490 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.819505 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.819514 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.838830 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.838857 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.839684 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.838918 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.838986 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.839541 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.839964 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:13 crc kubenswrapper[4717]: E1002 14:22:13.840079 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.922380 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.922660 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.922794 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.922975 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:13 crc kubenswrapper[4717]: I1002 14:22:13.923155 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:13Z","lastTransitionTime":"2025-10-02T14:22:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.026292 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.026642 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.026833 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.027090 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.027303 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.130534 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.130607 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.130625 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.130646 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.130662 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.232971 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.233017 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.233028 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.233045 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.233056 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.335248 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.335294 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.335310 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.335332 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.335347 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.438078 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.438141 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.438162 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.438194 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.438216 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.540722 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.540772 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.540783 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.540800 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.540810 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.643172 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.643224 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.643236 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.643251 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.643262 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.745530 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.745575 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.745587 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.745606 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.745616 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.847822 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.847862 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.847874 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.847887 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.847896 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.950546 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.950590 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.950598 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.950612 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:14 crc kubenswrapper[4717]: I1002 14:22:14.950621 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:14Z","lastTransitionTime":"2025-10-02T14:22:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.053592 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.053637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.053648 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.053662 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.053672 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.156043 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.156087 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.156098 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.156113 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.156123 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.258587 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.258674 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.258690 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.258713 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.258734 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.361625 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.361732 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.361803 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.361876 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.361904 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.465176 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.465231 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.465243 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.465260 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.465272 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.568876 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.569023 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.569076 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.569103 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.569121 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.671768 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.671817 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.671832 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.671850 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.671865 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.714346 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714457 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.714432811 +0000 UTC m=+150.566287267 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.714523 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.714564 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.714596 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.714643 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714730 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714741 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714760 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714760 4717 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714764 4717 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714828 4717 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714775 4717 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714778 4717 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714855 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.714832432 +0000 UTC m=+150.566686918 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.714978 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.714964105 +0000 UTC m=+150.566818591 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.715004 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.714993176 +0000 UTC m=+150.566847662 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.715026 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed.
No retries permitted until 2025-10-02 14:23:19.715015567 +0000 UTC m=+150.566870053 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.774228 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.774281 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.774298 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.774322 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.774339 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.838287 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.838372 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.838380 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.838320 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.838492 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.838680 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.838833 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:15 crc kubenswrapper[4717]: E1002 14:22:15.839038 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.877051 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.877107 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.877130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.877160 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.877184 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.980230 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.980267 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.980276 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.980291 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:15 crc kubenswrapper[4717]: I1002 14:22:15.980303 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:15Z","lastTransitionTime":"2025-10-02T14:22:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.082212 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.082286 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.082299 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.082313 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.082323 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.185246 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.185301 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.185318 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.185340 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.185357 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.290024 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.290385 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.290407 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.290430 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.290447 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.393389 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.393680 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.393752 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.393833 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.393921 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.496913 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.497249 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.497440 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.497608 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.497754 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.601024 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.601097 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.601119 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.601152 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.601174 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.704559 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.704628 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.704651 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.704675 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.704694 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.807293 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.807635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.807766 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.807842 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.807906 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.910203 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.910275 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.910288 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.910304 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:16 crc kubenswrapper[4717]: I1002 14:22:16.910342 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:16Z","lastTransitionTime":"2025-10-02T14:22:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.012829 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.012872 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.012884 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.012901 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.012914 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.116082 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.116129 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.116141 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.116159 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.116173 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.218538 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.218804 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.218939 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.219117 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.219251 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.320829 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.321190 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.321296 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.321372 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.321451 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.423641 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.423865 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.423928 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.424045 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.424159 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.526317 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.526351 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.526360 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.526372 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.526380 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.659152 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.659212 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.659228 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.659247 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.659258 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.761865 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.761891 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.761899 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.761912 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.761922 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.838661 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.838731 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.838667 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:17 crc kubenswrapper[4717]: E1002 14:22:17.838864 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:17 crc kubenswrapper[4717]: E1002 14:22:17.838880 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:17 crc kubenswrapper[4717]: E1002 14:22:17.838976 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.839295 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:17 crc kubenswrapper[4717]: E1002 14:22:17.839370 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.864264 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.864292 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.864300 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.864311 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.864318 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.967216 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.967315 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.967338 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.967367 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:17 crc kubenswrapper[4717]: I1002 14:22:17.967389 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:17Z","lastTransitionTime":"2025-10-02T14:22:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.070440 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.070538 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.070560 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.070591 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.070610 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.173739 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.173787 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.173796 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.173814 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.173824 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.276482 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.276565 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.276587 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.276615 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.276639 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.379673 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.379736 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.379758 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.379781 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.379799 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.487424 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.487708 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.487829 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.487936 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.488069 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.591071 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.591131 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.591155 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.591185 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.591208 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.694469 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.694522 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.694537 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.694556 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.694573 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.797189 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.797266 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.797290 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.797320 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.797342 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.900674 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.900759 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.900778 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.900807 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:18 crc kubenswrapper[4717]: I1002 14:22:18.900830 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:18Z","lastTransitionTime":"2025-10-02T14:22:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.004047 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.004183 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.004231 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.004264 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.004285 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.107255 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.107339 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.107363 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.107395 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.107416 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.210447 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.210507 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.210525 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.210550 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.210570 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.314650 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.314715 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.314740 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.314769 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.314790 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.417252 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.417326 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.417349 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.417376 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.417396 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.520541 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.520629 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.520653 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.520682 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.520704 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.623550 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.623593 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.623609 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.623626 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.623636 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.726868 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.726976 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.727002 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.727025 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.727047 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.829293 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.829358 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.829371 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.829388 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.829401 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.838819 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.838849 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.838986 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:19 crc kubenswrapper[4717]: E1002 14:22:19.839087 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.839131 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:19 crc kubenswrapper[4717]: E1002 14:22:19.839187 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:19 crc kubenswrapper[4717]: E1002 14:22:19.839293 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:19 crc kubenswrapper[4717]: E1002 14:22:19.839421 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.931313 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.931390 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.931423 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.931453 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:19 crc kubenswrapper[4717]: I1002 14:22:19.931476 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:19Z","lastTransitionTime":"2025-10-02T14:22:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.033841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.033870 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.033882 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.033898 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.033909 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.136301 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.136367 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.136383 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.136413 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.136434 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.240673 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.240798 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.240817 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.240847 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.240869 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.344097 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.344140 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.344150 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.344167 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.344177 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.446966 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.447016 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.447028 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.447045 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.447058 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.549547 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.549619 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.549628 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.549642 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.549651 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.652596 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.652657 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.652679 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.652708 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.652729 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.755682 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.755726 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.755742 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.755766 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.755779 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.857845 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.857904 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.857925 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.857988 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.858010 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.861060 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.873807 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.887858 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.898549 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 
14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.907624 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.922675 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:07Z\\\",\\\"message\\\":\\\"y.go:140\\\\nI1002 14:22:07.645723 6700 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:22:07.646580 6700 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:22:07.646632 6700 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1002 14:22:07.646637 6700 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1002 14:22:07.646650 6700 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1002 14:22:07.646655 6700 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1002 14:22:07.646745 6700 factory.go:656] Stopping watch factory\\\\nI1002 14:22:07.646759 6700 ovnkube.go:599] Stopped ovnkube\\\\nI1002 14:22:07.646794 6700 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:22:07.646804 6700 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:22:07.646809 6700 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:22:07.646814 6700 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:22:07.646819 6700 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:22:07.646831 6700 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1002 14:22:07.646898 6700 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:22:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.932572 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.942729 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.952670 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.961234 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.961305 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.961319 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.961365 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.961381 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:20Z","lastTransitionTime":"2025-10-02T14:22:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.963021 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.972783 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.982324 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:00Z\\\",\\\"message\\\":\\\"2025-10-02T14:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa\\\\n2025-10-02T14:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa to /host/opt/cni/bin/\\\\n2025-10-02T14:21:15Z [verbose] multus-daemon started\\\\n2025-10-02T14:21:15Z [verbose] Readiness Indicator file check\\\\n2025-10-02T14:22:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:22:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.991772 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:20 crc kubenswrapper[4717]: I1002 14:22:20.999731 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:20Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.009701 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:21Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.021139 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:21Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.032826 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:21Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.040859 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:21Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.063262 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.063306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.063319 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.063335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.063347 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.165877 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.165921 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.165965 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.165984 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.166004 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.268063 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.268108 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.268142 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.268166 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.268177 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.370559 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.370605 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.370614 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.370628 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.370696 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.473280 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.473374 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.473386 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.473425 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.473440 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.575265 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.575298 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.575306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.575335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.575345 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.677243 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.677276 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.677287 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.677303 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.677314 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.781665 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.781704 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.781794 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.781840 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.781861 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.838368 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.838488 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:21 crc kubenswrapper[4717]: E1002 14:22:21.838620 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.838726 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.839088 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:21 crc kubenswrapper[4717]: E1002 14:22:21.839184 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.839371 4717 scope.go:117] "RemoveContainer" containerID="c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9" Oct 02 14:22:21 crc kubenswrapper[4717]: E1002 14:22:21.839450 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:21 crc kubenswrapper[4717]: E1002 14:22:21.839533 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:22:21 crc kubenswrapper[4717]: E1002 14:22:21.839598 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.883803 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.883843 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.883856 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.883872 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.883885 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.986404 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.986441 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.986451 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.986464 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:21 crc kubenswrapper[4717]: I1002 14:22:21.986475 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:21Z","lastTransitionTime":"2025-10-02T14:22:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.088872 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.088917 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.088948 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.088967 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.088979 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.191058 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.191095 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.191106 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.191122 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.191134 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.292587 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.292636 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.292647 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.292665 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.292680 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.395280 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.395332 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.395343 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.395361 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.395375 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.498512 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.498550 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.498558 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.498572 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.498581 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.601513 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.601561 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.601576 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.601594 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.601605 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.704192 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.704240 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.704250 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.704265 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.704275 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.806869 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.806975 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.806996 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.807020 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.807037 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.910053 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.910115 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.910130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.910149 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:22 crc kubenswrapper[4717]: I1002 14:22:22.910163 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:22Z","lastTransitionTime":"2025-10-02T14:22:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.012747 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.012835 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.012851 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.012870 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.012884 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.114892 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.114963 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.114977 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.114994 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.115007 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.217437 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.217538 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.217561 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.217590 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.217615 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.319742 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.319816 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.319835 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.319852 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.319861 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.422534 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.422588 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.422599 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.422618 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.422632 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.525624 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.525672 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.525695 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.525719 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.525734 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.628338 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.628437 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.628455 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.628480 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.628498 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.663747 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.663801 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.663815 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.663832 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.663845 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.675177 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.678535 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.678579 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.678591 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.678610 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.678622 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.696242 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.700581 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.700623 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.700637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.700653 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.700667 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.713924 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.721384 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.721422 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.721431 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.721446 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.721455 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.734347 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.737936 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.737986 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.737995 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.738010 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.738020 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.750213 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:23Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.750372 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.752203 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.752265 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.752283 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.752309 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.752327 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.837823 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.837873 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.838088 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.838108 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.838199 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.838250 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.838273 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:23 crc kubenswrapper[4717]: E1002 14:22:23.838386 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.854781 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.854833 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.854850 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.854870 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.854887 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.956791 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.956859 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.956870 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.956883 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:23 crc kubenswrapper[4717]: I1002 14:22:23.956894 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:23Z","lastTransitionTime":"2025-10-02T14:22:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.837770 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.837855 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:25 crc kubenswrapper[4717]: E1002 14:22:25.837976 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.838048 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.838101 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:25 crc kubenswrapper[4717]: E1002 14:22:25.838062 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:25 crc kubenswrapper[4717]: E1002 14:22:25.838224 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:25 crc kubenswrapper[4717]: E1002 14:22:25.838266 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.849333 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.849370 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.849379 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.849392 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:25 crc kubenswrapper[4717]: I1002 14:22:25.849402 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:25Z","lastTransitionTime":"2025-10-02T14:22:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.837846 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.837901 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.837968 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:27 crc kubenswrapper[4717]: E1002 14:22:27.838131 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.838189 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:27 crc kubenswrapper[4717]: E1002 14:22:27.838391 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:27 crc kubenswrapper[4717]: E1002 14:22:27.838701 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:27 crc kubenswrapper[4717]: E1002 14:22:27.838851 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.912523 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.912594 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.912611 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.912635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:27 crc kubenswrapper[4717]: I1002 14:22:27.912652 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:27Z","lastTransitionTime":"2025-10-02T14:22:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:28 crc kubenswrapper[4717]: I1002 14:22:28.015335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:28 crc kubenswrapper[4717]: I1002 14:22:28.015395 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:28 crc kubenswrapper[4717]: I1002 14:22:28.015411 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:28 crc kubenswrapper[4717]: I1002 14:22:28.015431 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:28 crc kubenswrapper[4717]: I1002 14:22:28.015443 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:28Z","lastTransitionTime":"2025-10-02T14:22:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.688918 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:29 crc kubenswrapper[4717]: E1002 14:22:29.689068 4717 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:22:29 crc kubenswrapper[4717]: E1002 14:22:29.689139 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs podName:36c8fcb0-a074-461c-a5d1-c01106ee4997 nodeName:}" failed. No retries permitted until 2025-10-02 14:23:33.689120876 +0000 UTC m=+164.540975322 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs") pod "network-metrics-daemon-7v6wt" (UID: "36c8fcb0-a074-461c-a5d1-c01106ee4997") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.760999 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.761073 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.761092 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.761118 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.761137 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:29Z","lastTransitionTime":"2025-10-02T14:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.838741 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.838819 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:29 crc kubenswrapper[4717]: E1002 14:22:29.838868 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:29 crc kubenswrapper[4717]: E1002 14:22:29.839025 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.839114 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.839169 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:29 crc kubenswrapper[4717]: E1002 14:22:29.839249 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:29 crc kubenswrapper[4717]: E1002 14:22:29.839374 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.864042 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.864085 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.864094 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.864108 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.864118 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:29Z","lastTransitionTime":"2025-10-02T14:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.967040 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.967102 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.967122 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.967147 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:29 crc kubenswrapper[4717]: I1002 14:22:29.967166 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:29Z","lastTransitionTime":"2025-10-02T14:22:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.069803 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.069929 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.070015 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.070039 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.070055 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.173440 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.173493 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.173509 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.173531 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.173550 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.277057 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.277103 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.277111 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.277126 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.277135 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.379638 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.379675 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.379685 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.379700 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.379710 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.483398 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.483464 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.483487 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.483518 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.483542 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.585816 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.585852 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.585860 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.585873 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.585882 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.688445 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.688501 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.688511 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.688524 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.688535 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.792285 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.792352 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.792363 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.792379 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.792393 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.854574 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1d9164c-6127-4f40-ae97-942e6cd0faf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b14671a709b73ab92b47b612020d4d9b55c57f70ebbb9de7d91f0675f07e298\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e7e274daf38a11cab7dd4cb999369630c300ee6ff926b4775d429da904dbe8a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://35fd3c9aa676628fa5f6b41c12e6dfd863dbe9cb44faf22d1f53d48ae25170ac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://63261548e01584fc5778be0e1d7ed4bcbdf92170516cf1e9bd1bd3bdd9a13483\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://46320c4556343d0437bdd5033c0e0e5fff5a4dd947a52ef8066b7b6f128f783a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99f6c16703dbe116fa5367522f6bd8e45f383dc2dab9e96b618f774c6b41a47c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71964d0f25024da08d676b637099efd21d580428a88a92b1527218ef69e44137\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ns4hg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kwkj8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.871781 4717 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3a32c8d3-7320-4eb1-abfb-bbcf19602b23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f40509ea39ef51ad5844f7182ae8b7f415ea6d66ae66d881e0650c07e906cca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9b23cac64e3d9917bb5df2728ef41346e60a0569a64cbc203d35ff840f790a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lfbkw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:24Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pjj24\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.885407 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"248c7f77-fbbf-4d93-adf3-a0b3d61a5329\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d32f7f7f9d7c442db5227cfdcf9d68f6d2ba186955d22d755b2e20e0ae8d26f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d1826c9c4a795b3bddc52504532119e06e09bc3f7bf38bc584000dcd0a858a5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://49d2d1a87c30b12dc26a623d42ced559ffd0720ec6358c02f17d1626f71683af\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4f86f8ee8ff04736ff8c357f2475173019f75d5fc4efb4c4252f6219835cad7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://888e004d1cabc371e120ea62df7016dab81a90a81c228e0fcc6131a9ef42ddad\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1002 14:21:05.334016 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1002 14:21:05.337490 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1586831175/tls.crt::/tmp/serving-cert-1586831175/tls.key\\\\\\\"\\\\nI1002 14:21:11.387254 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1002 14:21:11.390250 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1002 14:21:11.390307 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1002 14:21:11.390354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1002 14:21:11.390385 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1002 14:21:11.398415 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1002 14:21:11.398438 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1002 14:21:11.398456 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398464 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1002 14:21:11.398473 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1002 14:21:11.398481 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1002 14:21:11.398486 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1002 14:21:11.398492 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1002 14:21:11.400173 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7fafe6183d07a9d749150f78964a51f8129e60395277174cd238e652e6d8cd4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eaac538b4a109fa08b607cf0a7b07aa02b71c4b5e653f28bb359acaab8b81a7e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.895793 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.895839 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.895847 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.895862 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.895870 4717 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.898352 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1413fc0e251eb0cf4c671f9dfd97190e4c62a0a833fd0a00fd433a056e6e3b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://845b9aa3b313c2229914ea7a339e9ce1bd1155eeeaa24f8bd48d05407b4bf011\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.907421 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vnsql" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58331ba8-4c83-4483-9d69-fb5c4b271c79\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba1aea35114ea64a6445c70410f1aa996e5aec3af3ce406cc9fabbd43ce8a26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gvr8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:18Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vnsql\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.922450 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.933337 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:14Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f92b9090e8b324c5a1eee1e351e6a261f7973df90ae29fd2d6b5736f59e636a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.948388 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s7n7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"424c679b-8db0-4ba4-9c8f-67a65fe38048\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:00Z\\\",\\\"message\\\":\\\"2025-10-02T14:21:14+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa\\\\n2025-10-02T14:21:14+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_966854da-5ecb-40d5-bbd5-67b0dc4669fa to /host/opt/cni/bin/\\\\n2025-10-02T14:21:15Z [verbose] multus-daemon started\\\\n2025-10-02T14:21:15Z [verbose] Readiness Indicator file check\\\\n2025-10-02T14:22:00Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:22:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d88cl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s7n7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.967005 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4de64e15-550a-4404-92fc-b355535a4bf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-02T14:22:07Z\\\",\\\"message\\\":\\\"y.go:140\\\\nI1002 14:22:07.645723 6700 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1002 14:22:07.646580 6700 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1002 14:22:07.646632 6700 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1002 14:22:07.646637 6700 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1002 14:22:07.646650 6700 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1002 14:22:07.646655 6700 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1002 14:22:07.646745 6700 factory.go:656] Stopping watch factory\\\\nI1002 14:22:07.646759 6700 ovnkube.go:599] Stopped ovnkube\\\\nI1002 14:22:07.646794 6700 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1002 14:22:07.646804 6700 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1002 14:22:07.646809 6700 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1002 14:22:07.646814 6700 handler.go:208] Removed *v1.Node event handler 2\\\\nI1002 14:22:07.646819 6700 handler.go:208] Removed *v1.Node event handler 7\\\\nI1002 14:22:07.646831 6700 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1002 14:22:07.646898 6700 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-02T14:22:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w94qv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-l7nn7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.978925 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"36c8fcb0-a074-461c-a5d1-c01106ee4997\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:25Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8kgf6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:25Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-7v6wt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.991735 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12069721-5b6b-4a5c-8b1e-5555b5adc2bd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3a4b9fce0a6d0e4b69d05f38d62afe9ff8c71fbc73c2361e030baf3af5319ef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2226ef8b94c8bdcad5b50d06df648ff472316af1954d4e6f878fe16a8504fe6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:30Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.998028 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.998075 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.998085 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.998101 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:30 crc kubenswrapper[4717]: I1002 14:22:30.998110 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:30Z","lastTransitionTime":"2025-10-02T14:22:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.003518 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5f569902-e659-45d6-afbd-5407687ee3ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c47f501737cec81afc25b9ff6ea95fefc2f71c1722e74f5b2da2fe9f9163c37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5267f40948ee26e1ad98a57c02a15fd9618e7e0b44d0dd6d8a617e963e190f8c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6eef05f0654679cb6ea0cee192b002a49d5fa6846a839de51a4ed4f1a28d0c6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4075a70acfdface9de647481614a3f14ec6f93ad703e33a382f8ad066db7495e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-02T14:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.014915 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://387f791ac266cfde225be31c6fc4eeadbc7408abb434ef9f42c085dbdd5fc820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.026261 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.034666 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5ljkq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"328e8bee-9892-4374-8985-28ac6cb2d377\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9693a0d81db0a02fdc9a11841a325bca52b98dc1e8180d6889d5dbbd7860fdaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qdv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5ljkq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-02T14:22:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.044471 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.060366 4717 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"405aba30-0ff3-4fca-a5da-09c35263665d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b6b8e799a5550c2137b4cdcad64eca0b20fa7f5bf821d652c4397db7c84b60c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:21:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7v64t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:21:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-sk55f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.073622 4717 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"292cfee3-ea3c-4dc6-b65e-5b9a610999b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:21:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-02T14:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8512195c5fe347c4c5a03ad7f0c27080a089cf9c651fc07795c7511471bc9def\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://acb39271abdb5a4cf9c25e6524b5c8a62a6327d83f83a8d5b4a19e19c486fae6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://417e26425a4518be0cc8e71ca28093ab66b95e0b27a6675052cd75b7ef12603f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7175d4652e712aa6130d2477804
9efab8a2cdca23552046f9ae47f8c12e2dbea\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-02T14:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-02T14:20:51Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:31Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.100973 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.101057 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.101068 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.101086 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.101097 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.204809 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.204853 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.204862 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.204878 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.204887 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.307999 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.308041 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.308051 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.308066 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.308076 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.410431 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.410468 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.410477 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.410490 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.410501 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.512884 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.512924 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.512960 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.512972 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.512981 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.616024 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.616067 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.616077 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.616093 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.616103 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.719402 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.719450 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.719460 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.719475 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.719488 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.822709 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.822790 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.822813 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.822845 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.822866 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.837976 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:31 crc kubenswrapper[4717]: E1002 14:22:31.838107 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.837973 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.838207 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.837976 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:31 crc kubenswrapper[4717]: E1002 14:22:31.840381 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:31 crc kubenswrapper[4717]: E1002 14:22:31.841416 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:31 crc kubenswrapper[4717]: E1002 14:22:31.846861 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.858539 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.926112 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.926147 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.926156 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.926170 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:31 crc kubenswrapper[4717]: I1002 14:22:31.926179 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:31Z","lastTransitionTime":"2025-10-02T14:22:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.029335 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.029405 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.029424 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.029494 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.029525 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.133338 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.133429 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.133441 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.133460 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.133476 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.236302 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.236400 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.236431 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.236463 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.236485 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.340212 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.340251 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.340259 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.340273 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.340282 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.443315 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.443401 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.443427 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.443465 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.443493 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.545864 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.545907 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.545918 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.546135 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.546161 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.648644 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.648728 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.648748 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.648778 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.648798 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.751306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.751350 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.751362 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.751376 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.751384 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.839259 4717 scope.go:117] "RemoveContainer" containerID="c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9" Oct 02 14:22:32 crc kubenswrapper[4717]: E1002 14:22:32.839565 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.853972 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.854046 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.854062 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.854084 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.854099 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.957827 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.957868 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.957878 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.957893 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:32 crc kubenswrapper[4717]: I1002 14:22:32.957905 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:32Z","lastTransitionTime":"2025-10-02T14:22:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.061423 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.061465 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.061473 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.061488 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.061496 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.163803 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.163841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.163851 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.163865 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.163914 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.266877 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.266926 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.266951 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.266965 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.266975 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.369468 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.369513 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.369525 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.369540 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.369551 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.471966 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.472007 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.472016 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.472029 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.472039 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.574565 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.574621 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.574636 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.574654 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.574666 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.676629 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.676698 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.676717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.676739 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.676757 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.762827 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.762894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.762913 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.762965 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.762982 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.781817 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:33Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.786245 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.786304 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.786325 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.786352 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.786372 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.804561 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:33Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.808841 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.808894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.808916 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.808979 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.809003 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.824309 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:33Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.830815 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.830863 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.830879 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.830905 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.830916 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.838597 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.838720 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.838618 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.838628 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.839109 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.839217 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.839505 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.839698 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.845134 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:33Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.848892 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.848952 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.848966 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.848985 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.848998 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.862985 4717 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-02T14:22:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"57121bc9-a96d-4ae5-b385-b930e368e855\\\",\\\"systemUUID\\\":\\\"f4ff76e8-93a4-4bac-8551-5d1b7e988a7f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-02T14:22:33Z is after 2025-08-24T17:21:41Z" Oct 02 14:22:33 crc kubenswrapper[4717]: E1002 14:22:33.863173 4717 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.864735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.864786 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.864802 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.864822 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.864836 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.967336 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.967426 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.967443 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.967465 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:33 crc kubenswrapper[4717]: I1002 14:22:33.967480 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:33Z","lastTransitionTime":"2025-10-02T14:22:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.069379 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.069421 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.069429 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.069441 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.069450 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.171918 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.172046 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.172063 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.172088 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.172105 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.273881 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.273922 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.273952 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.273968 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.273982 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.375452 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.375489 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.375500 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.375513 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.375522 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.477819 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.477855 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.477866 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.477881 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.477893 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.580221 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.580263 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.580272 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.580286 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.580294 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.683112 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.683147 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.683158 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.683183 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.683193 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.785419 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.785458 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.785470 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.785495 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.785507 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.887327 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.887375 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.887388 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.887403 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.887416 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.989609 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.989646 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.989658 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.989674 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:34 crc kubenswrapper[4717]: I1002 14:22:34.989685 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:34Z","lastTransitionTime":"2025-10-02T14:22:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.091673 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.091713 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.091721 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.091735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.091746 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.193894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.193949 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.193962 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.193978 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.193990 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.296372 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.296413 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.296422 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.296436 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.296445 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.398494 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.398532 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.398540 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.398554 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.398563 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.501418 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.501467 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.501477 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.501495 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.501506 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.604558 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.604614 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.604641 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.604909 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.604924 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.708533 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.708573 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.708586 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.708603 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.708613 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.812277 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.812667 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.812694 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.812723 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.812746 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.838855 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.838909 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.838901 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.838855 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:35 crc kubenswrapper[4717]: E1002 14:22:35.839131 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:35 crc kubenswrapper[4717]: E1002 14:22:35.839251 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:35 crc kubenswrapper[4717]: E1002 14:22:35.839330 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:35 crc kubenswrapper[4717]: E1002 14:22:35.839403 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.915709 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.915908 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.916013 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.916078 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:35 crc kubenswrapper[4717]: I1002 14:22:35.916153 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:35Z","lastTransitionTime":"2025-10-02T14:22:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.022230 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.022274 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.022290 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.022306 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.022343 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.247822 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.248130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.248195 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.248258 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.248328 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.350240 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.350503 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.350619 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.350703 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.350766 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.453128 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.453161 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.453170 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.453198 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.453207 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.555634 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.555656 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.555665 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.555677 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.555686 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.658773 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.658835 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.658845 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.658859 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.658870 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.761298 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.762161 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.762179 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.762192 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.762199 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.864644 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.864693 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.864702 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.864715 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.864724 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.967185 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.967537 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.967612 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.967675 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:36 crc kubenswrapper[4717]: I1002 14:22:36.967741 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:36Z","lastTransitionTime":"2025-10-02T14:22:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.069600 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.069635 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.069643 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.069656 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.069665 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.172372 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.172425 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.172440 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.172462 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.172477 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.275408 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.275644 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.275713 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.275778 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.275841 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.378531 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.378571 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.378606 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.378622 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.378633 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.481487 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.481553 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.481567 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.481581 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.481591 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.583779 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.583828 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.583838 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.583854 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.583865 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.686126 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.686613 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.686641 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.686665 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.686680 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.790104 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.790137 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.790145 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.790159 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.790168 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.838033 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.838071 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.838079 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.838038 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:37 crc kubenswrapper[4717]: E1002 14:22:37.838230 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:37 crc kubenswrapper[4717]: E1002 14:22:37.838382 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:37 crc kubenswrapper[4717]: E1002 14:22:37.838474 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:37 crc kubenswrapper[4717]: E1002 14:22:37.838609 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.892426 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.892478 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.892489 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.892516 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.892528 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.995037 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.995078 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.995088 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.995102 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:37 crc kubenswrapper[4717]: I1002 14:22:37.995110 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:37Z","lastTransitionTime":"2025-10-02T14:22:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.097771 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.097834 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.097853 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.097880 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.097897 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.200295 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.200356 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.200368 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.200388 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.200402 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.303313 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.303377 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.303396 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.303420 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.303437 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.406739 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.406788 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.406800 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.406821 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.406833 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.509690 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.509744 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.509759 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.509780 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.509791 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.611436 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.611484 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.611493 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.611506 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.611515 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.713199 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.713248 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.713257 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.713272 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.713282 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.816006 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.816049 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.816065 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.816081 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.816091 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.918896 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.918991 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.919028 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.919058 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:38 crc kubenswrapper[4717]: I1002 14:22:38.919079 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:38Z","lastTransitionTime":"2025-10-02T14:22:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.020900 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.020950 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.020959 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.020972 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.020981 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.123118 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.123358 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.123420 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.123494 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.123555 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.225277 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.225519 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.225581 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.225649 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.225744 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.328747 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.328816 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.328835 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.328864 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.328884 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.431511 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.431558 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.431570 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.431587 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.431599 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.534703 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.534749 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.534758 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.534773 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.534783 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.637128 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.637171 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.637183 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.637197 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.637208 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.739574 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.739617 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.739630 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.739649 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.739659 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.838606 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.838626 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.838648 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.838660 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:39 crc kubenswrapper[4717]: E1002 14:22:39.839151 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:39 crc kubenswrapper[4717]: E1002 14:22:39.839242 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:39 crc kubenswrapper[4717]: E1002 14:22:39.839311 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:39 crc kubenswrapper[4717]: E1002 14:22:39.839433 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.843395 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.843441 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.843454 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.843474 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.843487 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.945624 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.945665 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.945675 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.945689 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:39 crc kubenswrapper[4717]: I1002 14:22:39.945699 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:39Z","lastTransitionTime":"2025-10-02T14:22:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.049400 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.049444 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.049480 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.049499 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.049512 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.151618 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.151673 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.151685 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.151702 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.151715 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.253637 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.253680 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.253696 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.253711 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.253721 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.356271 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.356316 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.356359 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.356376 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.356386 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.458351 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.458390 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.458417 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.458432 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.458447 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.561046 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.561098 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.561114 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.561127 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.561135 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.663178 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.663207 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.663215 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.663229 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.663237 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.765096 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.765148 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.765157 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.765176 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.765185 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.868157 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.868199 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.868207 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.868220 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.868229 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.881879 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=89.881859176 podStartE2EDuration="1m29.881859176s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:40.865415917 +0000 UTC m=+111.717270413" watchObservedRunningTime="2025-10-02 14:22:40.881859176 +0000 UTC m=+111.733713622" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.921361 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-5ljkq" podStartSLOduration=89.921342978 podStartE2EDuration="1m29.921342978s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:40.908042204 +0000 UTC m=+111.759896670" watchObservedRunningTime="2025-10-02 14:22:40.921342978 +0000 UTC m=+111.773197424" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.930808 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podStartSLOduration=89.93079167 podStartE2EDuration="1m29.93079167s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:40.930685377 +0000 UTC m=+111.782539833" watchObservedRunningTime="2025-10-02 14:22:40.93079167 +0000 UTC m=+111.782646116" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.970838 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.970871 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.970879 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.970894 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.970902 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:40Z","lastTransitionTime":"2025-10-02T14:22:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.973254 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=9.973210731 podStartE2EDuration="9.973210731s" podCreationTimestamp="2025-10-02 14:22:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:40.954200444 +0000 UTC m=+111.806054900" watchObservedRunningTime="2025-10-02 14:22:40.973210731 +0000 UTC m=+111.825065177" Oct 02 14:22:40 crc kubenswrapper[4717]: I1002 14:22:40.993287 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-kwkj8" podStartSLOduration=89.993221875 podStartE2EDuration="1m29.993221875s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:40.992824863 +0000 UTC m=+111.844679309" watchObservedRunningTime="2025-10-02 14:22:40.993221875 +0000 UTC m=+111.845076341" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.007162 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pjj24" podStartSLOduration=89.007140816 podStartE2EDuration="1m29.007140816s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:41.006525379 +0000 UTC m=+111.858379875" watchObservedRunningTime="2025-10-02 14:22:41.007140816 +0000 UTC m=+111.858995272" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.029813 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=90.029792949 podStartE2EDuration="1m30.029792949s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:41.029230834 +0000 UTC m=+111.881085310" watchObservedRunningTime="2025-10-02 14:22:41.029792949 +0000 UTC m=+111.881647395" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.048041 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-vnsql" podStartSLOduration=89.048020345 podStartE2EDuration="1m29.048020345s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:41.046995888 +0000 UTC m=+111.898850334" watchObservedRunningTime="2025-10-02 14:22:41.048020345 +0000 UTC m=+111.899874791" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.069965 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=61.069947239 podStartE2EDuration="1m1.069947239s" podCreationTimestamp="2025-10-02 14:21:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:41.069859157 +0000 UTC m=+111.921713603" watchObservedRunningTime="2025-10-02 14:22:41.069947239 +0000 UTC m=+111.921801685" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 
14:22:41.073538 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.073579 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.073591 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.073689 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.073706 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.110657 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-s7n7q" podStartSLOduration=90.110641315 podStartE2EDuration="1m30.110641315s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:41.110522451 +0000 UTC m=+111.962376897" watchObservedRunningTime="2025-10-02 14:22:41.110641315 +0000 UTC m=+111.962495761" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.150173 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=45.150147048 podStartE2EDuration="45.150147048s" podCreationTimestamp="2025-10-02 14:21:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:41.150107927 +0000 UTC m=+112.001962373" watchObservedRunningTime="2025-10-02 14:22:41.150147048 +0000 UTC m=+112.002001514" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.175777 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.175830 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.175839 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.175858 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.175875 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.278103 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.278146 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.278159 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.278175 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.278187 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.380359 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.380395 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.380404 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.380436 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.380446 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.482636 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.482689 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.482698 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.482713 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.482721 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.584817 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.584881 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.584901 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.584924 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.584998 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.687470 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.687505 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.687514 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.687545 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.687555 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.791013 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.791081 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.791099 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.791122 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.791139 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.837984 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.837992 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.838001 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.838255 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:41 crc kubenswrapper[4717]: E1002 14:22:41.838414 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:41 crc kubenswrapper[4717]: E1002 14:22:41.838537 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:41 crc kubenswrapper[4717]: E1002 14:22:41.838664 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:41 crc kubenswrapper[4717]: E1002 14:22:41.838760 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.893462 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.893513 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.893523 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.893537 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.893546 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.995755 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.995794 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.995806 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.995822 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:41 crc kubenswrapper[4717]: I1002 14:22:41.995834 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:41Z","lastTransitionTime":"2025-10-02T14:22:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.098274 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.098307 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.098315 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.098327 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.098336 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.200324 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.200368 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.200377 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.200392 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.200402 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.302586 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.302615 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.302622 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.302638 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.302648 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.405130 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.405162 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.405169 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.405180 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.405192 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.508083 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.508115 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.508125 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.508138 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.508146 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.610036 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.610081 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.610093 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.610107 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.610119 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.712516 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.712560 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.712568 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.712581 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.712591 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.815127 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.815186 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.815200 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.815224 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.815242 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.918144 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.918181 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.918189 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.918202 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:42 crc kubenswrapper[4717]: I1002 14:22:42.918211 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:42Z","lastTransitionTime":"2025-10-02T14:22:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.021392 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.021462 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.021480 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.021502 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.021518 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.125171 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.125265 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.125286 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.125326 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.125355 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.229100 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.229200 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.229215 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.229238 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.229253 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.331997 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.332058 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.332066 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.332081 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.332090 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.434652 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.434709 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.434719 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.434735 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.434746 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.536458 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.536542 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.536552 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.536569 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.536578 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.639456 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.639500 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.639508 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.639522 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.639532 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.742565 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.742626 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.742640 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.742669 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.742683 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.838707 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.838821 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.838865 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.838840 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:43 crc kubenswrapper[4717]: E1002 14:22:43.839190 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:43 crc kubenswrapper[4717]: E1002 14:22:43.839392 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:43 crc kubenswrapper[4717]: E1002 14:22:43.839544 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:43 crc kubenswrapper[4717]: E1002 14:22:43.839634 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.845559 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.845598 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.845610 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.845629 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.845645 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.949586 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.949661 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.949686 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.949717 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:43 crc kubenswrapper[4717]: I1002 14:22:43.949738 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:43Z","lastTransitionTime":"2025-10-02T14:22:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.052452 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.052538 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.052560 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.052590 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.052610 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:44Z","lastTransitionTime":"2025-10-02T14:22:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.056177 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.056263 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.056282 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.056308 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.056327 4717 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-02T14:22:44Z","lastTransitionTime":"2025-10-02T14:22:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.103905 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4"] Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.104869 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.106911 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.107288 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.108426 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.109400 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.124851 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f54710f-db70-4c9b-8bde-857cc2f44903-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.124904 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5f54710f-db70-4c9b-8bde-857cc2f44903-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.124992 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5f54710f-db70-4c9b-8bde-857cc2f44903-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.125019 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5f54710f-db70-4c9b-8bde-857cc2f44903-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.125046 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5f54710f-db70-4c9b-8bde-857cc2f44903-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.225727 4717 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5f54710f-db70-4c9b-8bde-857cc2f44903-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.225794 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5f54710f-db70-4c9b-8bde-857cc2f44903-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.225832 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5f54710f-db70-4c9b-8bde-857cc2f44903-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.225880 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f54710f-db70-4c9b-8bde-857cc2f44903-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.225903 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/5f54710f-db70-4c9b-8bde-857cc2f44903-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.225918 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5f54710f-db70-4c9b-8bde-857cc2f44903-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.226104 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/5f54710f-db70-4c9b-8bde-857cc2f44903-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.227359 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5f54710f-db70-4c9b-8bde-857cc2f44903-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.239346 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5f54710f-db70-4c9b-8bde-857cc2f44903-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.248767 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5f54710f-db70-4c9b-8bde-857cc2f44903-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7kxr4\" (UID: \"5f54710f-db70-4c9b-8bde-857cc2f44903\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:44 crc kubenswrapper[4717]: I1002 14:22:44.427303 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" Oct 02 14:22:45 crc kubenswrapper[4717]: I1002 14:22:45.394737 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" event={"ID":"5f54710f-db70-4c9b-8bde-857cc2f44903","Type":"ContainerStarted","Data":"f3c7fedf10330b65e61df8fe0a3ffb8fb5bc07f0d2a2652e21177fbece5b4238"} Oct 02 14:22:45 crc kubenswrapper[4717]: I1002 14:22:45.394809 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" event={"ID":"5f54710f-db70-4c9b-8bde-857cc2f44903","Type":"ContainerStarted","Data":"196cba48a914d311438a159bfa911d421f361d600c70bdc65fef2b6b2e5dd783"} Oct 02 14:22:45 crc kubenswrapper[4717]: I1002 14:22:45.838238 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:45 crc kubenswrapper[4717]: I1002 14:22:45.838241 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:45 crc kubenswrapper[4717]: E1002 14:22:45.838781 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:45 crc kubenswrapper[4717]: I1002 14:22:45.838302 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:45 crc kubenswrapper[4717]: E1002 14:22:45.838888 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:45 crc kubenswrapper[4717]: I1002 14:22:45.838278 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:45 crc kubenswrapper[4717]: E1002 14:22:45.839014 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:45 crc kubenswrapper[4717]: E1002 14:22:45.838791 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:46 crc kubenswrapper[4717]: I1002 14:22:46.839117 4717 scope.go:117] "RemoveContainer" containerID="c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9" Oct 02 14:22:46 crc kubenswrapper[4717]: E1002 14:22:46.839311 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-l7nn7_openshift-ovn-kubernetes(4de64e15-550a-4404-92fc-b355535a4bf2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.404154 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/1.log" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.405162 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/0.log" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.405241 4717 generic.go:334] "Generic (PLEG): container finished" podID="424c679b-8db0-4ba4-9c8f-67a65fe38048" containerID="65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d" exitCode=1 Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.405282 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s7n7q" event={"ID":"424c679b-8db0-4ba4-9c8f-67a65fe38048","Type":"ContainerDied","Data":"65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d"} Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.405319 4717 scope.go:117] "RemoveContainer" containerID="0c025bf39307a41a48b26f97e0c66bd31548e9cb9ca3801a021bed6393095893" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.407021 4717 scope.go:117] "RemoveContainer" containerID="65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d" Oct 02 14:22:47 crc kubenswrapper[4717]: E1002 14:22:47.407424 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-s7n7q_openshift-multus(424c679b-8db0-4ba4-9c8f-67a65fe38048)\"" pod="openshift-multus/multus-s7n7q" podUID="424c679b-8db0-4ba4-9c8f-67a65fe38048" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.435608 4717 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7kxr4" podStartSLOduration=96.435591392 podStartE2EDuration="1m36.435591392s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:22:45.407756339 +0000 UTC m=+116.259610785" watchObservedRunningTime="2025-10-02 14:22:47.435591392 +0000 UTC m=+118.287445838" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.838023 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.838062 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.838033 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:47 crc kubenswrapper[4717]: E1002 14:22:47.838175 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:47 crc kubenswrapper[4717]: E1002 14:22:47.838241 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:47 crc kubenswrapper[4717]: E1002 14:22:47.838376 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:47 crc kubenswrapper[4717]: I1002 14:22:47.838795 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:47 crc kubenswrapper[4717]: E1002 14:22:47.843559 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:48 crc kubenswrapper[4717]: I1002 14:22:48.410710 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/1.log" Oct 02 14:22:49 crc kubenswrapper[4717]: I1002 14:22:49.838090 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:49 crc kubenswrapper[4717]: E1002 14:22:49.838241 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:49 crc kubenswrapper[4717]: I1002 14:22:49.838425 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:49 crc kubenswrapper[4717]: E1002 14:22:49.838486 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:49 crc kubenswrapper[4717]: I1002 14:22:49.838616 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:49 crc kubenswrapper[4717]: E1002 14:22:49.838671 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:49 crc kubenswrapper[4717]: I1002 14:22:49.838770 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:49 crc kubenswrapper[4717]: E1002 14:22:49.838810 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:50 crc kubenswrapper[4717]: E1002 14:22:50.833109 4717 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 02 14:22:51 crc kubenswrapper[4717]: E1002 14:22:51.053209 4717 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 14:22:51 crc kubenswrapper[4717]: I1002 14:22:51.838771 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:51 crc kubenswrapper[4717]: I1002 14:22:51.838857 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:51 crc kubenswrapper[4717]: I1002 14:22:51.838880 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:51 crc kubenswrapper[4717]: I1002 14:22:51.838895 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:51 crc kubenswrapper[4717]: E1002 14:22:51.839058 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:51 crc kubenswrapper[4717]: E1002 14:22:51.839213 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:51 crc kubenswrapper[4717]: E1002 14:22:51.839310 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:51 crc kubenswrapper[4717]: E1002 14:22:51.839442 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:53 crc kubenswrapper[4717]: I1002 14:22:53.838590 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:53 crc kubenswrapper[4717]: I1002 14:22:53.838642 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:53 crc kubenswrapper[4717]: I1002 14:22:53.838650 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:53 crc kubenswrapper[4717]: E1002 14:22:53.838781 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:53 crc kubenswrapper[4717]: I1002 14:22:53.838836 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:53 crc kubenswrapper[4717]: E1002 14:22:53.838898 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:53 crc kubenswrapper[4717]: E1002 14:22:53.839183 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:53 crc kubenswrapper[4717]: E1002 14:22:53.839285 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:55 crc kubenswrapper[4717]: I1002 14:22:55.837789 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:55 crc kubenswrapper[4717]: I1002 14:22:55.837790 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:55 crc kubenswrapper[4717]: E1002 14:22:55.837945 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:55 crc kubenswrapper[4717]: I1002 14:22:55.837816 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:55 crc kubenswrapper[4717]: I1002 14:22:55.837790 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:55 crc kubenswrapper[4717]: E1002 14:22:55.838039 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:55 crc kubenswrapper[4717]: E1002 14:22:55.838095 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:55 crc kubenswrapper[4717]: E1002 14:22:55.838148 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:56 crc kubenswrapper[4717]: E1002 14:22:56.054427 4717 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 14:22:57 crc kubenswrapper[4717]: I1002 14:22:57.838544 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:57 crc kubenswrapper[4717]: E1002 14:22:57.838708 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:57 crc kubenswrapper[4717]: I1002 14:22:57.838777 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:57 crc kubenswrapper[4717]: I1002 14:22:57.838818 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:57 crc kubenswrapper[4717]: I1002 14:22:57.838777 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:57 crc kubenswrapper[4717]: E1002 14:22:57.838918 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:22:57 crc kubenswrapper[4717]: E1002 14:22:57.838999 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:57 crc kubenswrapper[4717]: E1002 14:22:57.839082 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:59 crc kubenswrapper[4717]: I1002 14:22:59.838748 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:22:59 crc kubenswrapper[4717]: I1002 14:22:59.838760 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:22:59 crc kubenswrapper[4717]: E1002 14:22:59.839679 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:22:59 crc kubenswrapper[4717]: I1002 14:22:59.838875 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:22:59 crc kubenswrapper[4717]: E1002 14:22:59.839953 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:22:59 crc kubenswrapper[4717]: E1002 14:22:59.839794 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:22:59 crc kubenswrapper[4717]: I1002 14:22:59.838820 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:22:59 crc kubenswrapper[4717]: E1002 14:22:59.840491 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:23:01 crc kubenswrapper[4717]: E1002 14:23:01.054982 4717 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 14:23:01 crc kubenswrapper[4717]: I1002 14:23:01.837962 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:01 crc kubenswrapper[4717]: I1002 14:23:01.838857 4717 scope.go:117] "RemoveContainer" containerID="65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d" Oct 02 14:23:01 crc kubenswrapper[4717]: E1002 14:23:01.839512 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:23:01 crc kubenswrapper[4717]: I1002 14:23:01.839806 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:23:01 crc kubenswrapper[4717]: E1002 14:23:01.839923 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:23:01 crc kubenswrapper[4717]: I1002 14:23:01.839894 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:23:01 crc kubenswrapper[4717]: E1002 14:23:01.840175 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:23:01 crc kubenswrapper[4717]: I1002 14:23:01.840199 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:23:01 crc kubenswrapper[4717]: E1002 14:23:01.840278 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:23:01 crc kubenswrapper[4717]: I1002 14:23:01.841085 4717 scope.go:117] "RemoveContainer" containerID="c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9" Oct 02 14:23:02 crc kubenswrapper[4717]: I1002 14:23:02.460834 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/1.log" Oct 02 14:23:02 crc kubenswrapper[4717]: I1002 14:23:02.461193 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s7n7q" event={"ID":"424c679b-8db0-4ba4-9c8f-67a65fe38048","Type":"ContainerStarted","Data":"3527acb10ee45ae3438864810d930ced0270423b8149f122db3ebeef60142df5"} Oct 02 14:23:02 crc kubenswrapper[4717]: I1002 14:23:02.462963 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/3.log" Oct 02 14:23:02 crc kubenswrapper[4717]: I1002 14:23:02.464979 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerStarted","Data":"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46"} Oct 02 14:23:02 crc kubenswrapper[4717]: I1002 14:23:02.465266 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:23:02 crc kubenswrapper[4717]: I1002 14:23:02.671621 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podStartSLOduration=111.671597429 podStartE2EDuration="1m51.671597429s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:02.506369935 +0000 UTC m=+133.358224381" watchObservedRunningTime="2025-10-02 14:23:02.671597429 +0000 UTC m=+133.523451895" Oct 02 14:23:02 crc kubenswrapper[4717]: I1002 14:23:02.672987 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-7v6wt"] Oct 02 14:23:02 crc kubenswrapper[4717]: I1002 14:23:02.673123 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:23:02 crc kubenswrapper[4717]: E1002 14:23:02.673248 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:23:03 crc kubenswrapper[4717]: I1002 14:23:03.838013 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:03 crc kubenswrapper[4717]: I1002 14:23:03.838104 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:23:03 crc kubenswrapper[4717]: I1002 14:23:03.838182 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:23:03 crc kubenswrapper[4717]: E1002 14:23:03.838178 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:23:03 crc kubenswrapper[4717]: E1002 14:23:03.838256 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:23:03 crc kubenswrapper[4717]: E1002 14:23:03.838322 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:23:04 crc kubenswrapper[4717]: I1002 14:23:04.838878 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:23:04 crc kubenswrapper[4717]: E1002 14:23:04.839181 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-7v6wt" podUID="36c8fcb0-a074-461c-a5d1-c01106ee4997" Oct 02 14:23:05 crc kubenswrapper[4717]: I1002 14:23:05.838359 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:05 crc kubenswrapper[4717]: I1002 14:23:05.838449 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:23:05 crc kubenswrapper[4717]: E1002 14:23:05.838534 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 02 14:23:05 crc kubenswrapper[4717]: I1002 14:23:05.838597 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:23:05 crc kubenswrapper[4717]: E1002 14:23:05.838669 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 02 14:23:05 crc kubenswrapper[4717]: E1002 14:23:05.838736 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 02 14:23:06 crc kubenswrapper[4717]: I1002 14:23:06.838351 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:23:06 crc kubenswrapper[4717]: I1002 14:23:06.840534 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 02 14:23:06 crc kubenswrapper[4717]: I1002 14:23:06.840871 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 02 14:23:07 crc kubenswrapper[4717]: I1002 14:23:07.838322 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:07 crc kubenswrapper[4717]: I1002 14:23:07.838332 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:23:07 crc kubenswrapper[4717]: I1002 14:23:07.838490 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:23:07 crc kubenswrapper[4717]: I1002 14:23:07.842362 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 02 14:23:07 crc kubenswrapper[4717]: I1002 14:23:07.842391 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 02 14:23:07 crc kubenswrapper[4717]: I1002 14:23:07.842478 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 02 14:23:07 crc kubenswrapper[4717]: I1002 14:23:07.842686 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 02 14:23:10 crc kubenswrapper[4717]: I1002 14:23:10.363163 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.532468 4717 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.563510 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d6qrw"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.564087 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.564483 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.564970 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gkzjm"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.565317 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.565682 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.569190 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.569481 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 02 14:23:14 crc kubenswrapper[4717]: W1002 14:23:14.569523 4717 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Oct 02 14:23:14 crc kubenswrapper[4717]: E1002 14:23:14.569571 4717 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.569639 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.569646 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.569844 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.569957 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.570017 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.572171 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.572379 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.572522 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.572763 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.572993 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.573459 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.573653 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" 
Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.573788 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.573989 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.577283 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.577284 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.578008 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.578395 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.580423 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cqd52"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.581278 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.590653 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2z55s"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.591409 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.593206 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.593709 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.598812 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.599123 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.599226 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.600703 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.600916 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.601234 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.602546 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.604119 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.604361 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.604497 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.604500 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.606246 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-r4gdr"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.607021 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qmztn"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.607678 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-l2f4x"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.621453 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.622335 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.607801 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.608170 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.608209 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.608396 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.608431 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.608467 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.608506 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.608538 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.608573 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.623201 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.623433 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.607394 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.606613 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.623270 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.623533 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.641114 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.641534 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.641974 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.646627 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-n9m2d"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.646851 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.647038 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wn65n"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.647261 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.647465 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h6pmp"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.647584 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.647805 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.648150 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.648860 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.649341 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-sf5xk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.649623 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-sf5xk" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.649821 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.650122 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xwrtk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.650910 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.651806 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.652588 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkhsn"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.652867 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.653066 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.656371 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.656780 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.657511 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.658021 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.658307 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.658529 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.658711 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.658771 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.658768 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660707 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-config\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660760 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjsdb\" (UniqueName: \"kubernetes.io/projected/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-kube-api-access-pjsdb\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660785 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95017b18-3508-46ed-a3a7-a6834d5ada15-config\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660814 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/95017b18-3508-46ed-a3a7-a6834d5ada15-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660835 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-audit-dir\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660859 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-client-ca\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660876 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660891 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5hrq\" (UniqueName: \"kubernetes.io/projected/4ee2df25-8a54-4608-b82e-41edda414d2b-kube-api-access-r5hrq\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660910 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660949 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/95017b18-3508-46ed-a3a7-a6834d5ada15-images\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660967 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-audit-policies\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.660991 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-encryption-config\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.661008 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ee2df25-8a54-4608-b82e-41edda414d2b-serving-cert\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.661033 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.661058 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-etcd-client\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.661074 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-serving-cert\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.661102 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj8rz\" (UniqueName: \"kubernetes.io/projected/95017b18-3508-46ed-a3a7-a6834d5ada15-kube-api-access-mj8rz\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.661267 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.663176 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2z55s"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.663447 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.665758 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d6qrw"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.667403 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gkzjm"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.667513 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cqd52"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.668491 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.671243 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.671549 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.671692 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.671805 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.671916 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.672231 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.672966 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.673040 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.673232 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.673520 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.673522 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.673686 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.673750 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.674156 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.674421 
4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.675280 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.675456 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.675574 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.675969 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676095 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676103 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676190 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676277 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676311 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676369 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676420 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676540 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676599 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676660 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676698 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676784 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676833 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676889 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676987 4717 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677042 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677141 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677240 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677350 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677384 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677399 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.676788 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677503 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677587 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677706 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.677842 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.678029 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.679977 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.681772 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.698231 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-xcjdc"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.701640 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.705330 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.718107 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.719212 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.719297 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.720211 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.720314 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.721391 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.723430 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-x9gbl"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.724163 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.724320 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.724400 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.726028 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.726228 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.727089 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.727643 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.731978 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.732230 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.731990 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.733156 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.733258 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.733870 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.735264 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.736167 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.736761 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.737086 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.737571 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.738443 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.739179 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.739565 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.739962 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.740682 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rncj"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.741258 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.743113 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.743658 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.744139 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kwgnk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.744591 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.745449 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.745977 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.746063 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.746828 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-n9m2d"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.748024 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wn65n"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.749255 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qmztn"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.750970 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.752305 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.755449 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-sf5xk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.756392 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h6pmp"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.762807 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.762884 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.763630 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj8rz\" (UniqueName: \"kubernetes.io/projected/95017b18-3508-46ed-a3a7-a6834d5ada15-kube-api-access-mj8rz\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.763683 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-config\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.763711 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjsdb\" (UniqueName: \"kubernetes.io/projected/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-kube-api-access-pjsdb\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.763768 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpjcn\" (UniqueName: \"kubernetes.io/projected/279ce48f-4f2a-407a-bec4-2e0752b71c2c-kube-api-access-lpjcn\") pod \"cluster-samples-operator-665b6dd947-jl7d4\" (UID: \"279ce48f-4f2a-407a-bec4-2e0752b71c2c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.763803 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95017b18-3508-46ed-a3a7-a6834d5ada15-config\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.763845 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/95017b18-3508-46ed-a3a7-a6834d5ada15-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.763876 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-audit-dir\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.764095 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-client-ca\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.764127 4717 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.764311 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/279ce48f-4f2a-407a-bec4-2e0752b71c2c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jl7d4\" (UID: \"279ce48f-4f2a-407a-bec4-2e0752b71c2c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.764349 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5hrq\" (UniqueName: \"kubernetes.io/projected/4ee2df25-8a54-4608-b82e-41edda414d2b-kube-api-access-r5hrq\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.764581 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.764617 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/95017b18-3508-46ed-a3a7-a6834d5ada15-images\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.764800 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-audit-policies\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.765026 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-encryption-config\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.765060 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ee2df25-8a54-4608-b82e-41edda414d2b-serving-cert\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.765160 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d2wpl\" 
(UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.765212 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-etcd-client\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.765247 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-serving-cert\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.765970 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-config\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.766325 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-client-ca\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.766664 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/95017b18-3508-46ed-a3a7-a6834d5ada15-images\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.766831 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95017b18-3508-46ed-a3a7-a6834d5ada15-config\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.766988 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-audit-dir\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.769033 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.769720 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-audit-policies\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.771344 4717 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.772921 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.773108 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.774511 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.775011 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-serving-cert\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.775830 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-encryption-config\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.776987 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/95017b18-3508-46ed-a3a7-a6834d5ada15-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.777878 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.801287 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-etcd-client\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.801734 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ee2df25-8a54-4608-b82e-41edda414d2b-serving-cert\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.802246 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-etcd-serving-ca\") pod 
\"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.805012 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.805586 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-r4gdr"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.807283 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.808445 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xcjdc"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.811111 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.813560 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.816545 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xwrtk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.818341 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.820037 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.821727 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4mg6p"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.823344 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.823622 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-89nf8"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.824165 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-89nf8" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.825366 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.825858 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.827365 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.828921 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.830711 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rncj"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.832534 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.833706 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.835728 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.837069 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.843381 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-x9gbl"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.843414 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkhsn"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.843426 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4mg6p"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.843437 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-89nf8"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.844502 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.846371 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kwgnk"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.846529 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.848148 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-p76lr"] Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.849383 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.866159 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.866508 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpjcn\" (UniqueName: \"kubernetes.io/projected/279ce48f-4f2a-407a-bec4-2e0752b71c2c-kube-api-access-lpjcn\") pod \"cluster-samples-operator-665b6dd947-jl7d4\" (UID: \"279ce48f-4f2a-407a-bec4-2e0752b71c2c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.866565 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/279ce48f-4f2a-407a-bec4-2e0752b71c2c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jl7d4\" (UID: \"279ce48f-4f2a-407a-bec4-2e0752b71c2c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.870509 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/279ce48f-4f2a-407a-bec4-2e0752b71c2c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jl7d4\" (UID: \"279ce48f-4f2a-407a-bec4-2e0752b71c2c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.886906 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.906902 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.925964 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.946466 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.966898 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 02 14:23:14 crc kubenswrapper[4717]: I1002 14:23:14.986318 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.006426 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.027700 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.047664 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.066851 4717 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.085850 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.106158 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.126379 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.146456 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.166502 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.186391 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.206924 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.227392 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.247164 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.268833 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.286826 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.306188 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.327155 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.346986 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.366971 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.386956 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.406791 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.428968 4717 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.446928 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.466498 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.487325 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.506791 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.527414 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.586623 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.606739 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.633580 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.646217 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.666885 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.685969 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.706084 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.724347 4717 request.go:700] Waited for 1.003700696s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-scheduler-operator/secrets?fieldSelector=metadata.name%3Dopenshift-kube-scheduler-operator-dockercfg-qt55r&limit=500&resourceVersion=0 Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.725464 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.746700 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.766464 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.785679 4717 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.806105 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.825781 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.845652 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.866006 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.886138 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.905253 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.926437 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.946568 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.965547 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 02 14:23:15 crc kubenswrapper[4717]: I1002 14:23:15.986707 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.005813 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.026276 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.046015 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.065999 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.086367 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.106280 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.136737 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.145667 4717 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.165884 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.187682 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.206207 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.226648 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.246528 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.266562 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.286601 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.307016 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.326262 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.345756 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.365708 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.387121 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.406865 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.426839 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.472708 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjsdb\" (UniqueName: \"kubernetes.io/projected/3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3-kube-api-access-pjsdb\") pod \"apiserver-7bbb656c7d-d2wpl\" (UID: \"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.487545 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj8rz\" (UniqueName: \"kubernetes.io/projected/95017b18-3508-46ed-a3a7-a6834d5ada15-kube-api-access-mj8rz\") pod \"machine-api-operator-5694c8668f-d6qrw\" (UID: \"95017b18-3508-46ed-a3a7-a6834d5ada15\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 
14:23:16.502658 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5hrq\" (UniqueName: \"kubernetes.io/projected/4ee2df25-8a54-4608-b82e-41edda414d2b-kube-api-access-r5hrq\") pod \"controller-manager-879f6c89f-gkzjm\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.508209 4717 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.526476 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.547194 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.568087 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.586840 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.607063 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.627153 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.646842 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.667750 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.687081 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.704567 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.724466 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpjcn\" (UniqueName: \"kubernetes.io/projected/279ce48f-4f2a-407a-bec4-2e0752b71c2c-kube-api-access-lpjcn\") pod \"cluster-samples-operator-665b6dd947-jl7d4\" (UID: \"279ce48f-4f2a-407a-bec4-2e0752b71c2c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.724573 4717 request.go:700] Waited for 1.176555105s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/persistentvolumes/pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.742346 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.767651 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.770080 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788474 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-config\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788511 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/400b985e-f209-45b1-afa7-6904803111e5-serving-cert\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788530 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-trusted-ca-bundle\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788545 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/472afeb7-afee-4a47-ae7f-2c879d9d1dac-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788563 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrjtl\" (UniqueName: \"kubernetes.io/projected/ab766803-fd23-476d-a273-ddf3c6dd237b-kube-api-access-jrjtl\") pod \"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788582 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-serving-cert\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788599 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: 
\"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788616 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-etcd-client\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788632 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788649 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788675 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-bound-sa-token\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788689 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da43c1f8-cc18-485d-ba7e-f8761d29584c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788704 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b3f6b3b-4788-41a8-ad23-71d0595da58e-serving-cert\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788720 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1b3f6b3b-4788-41a8-ad23-71d0595da58e-trusted-ca\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788742 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7c8b49f-9023-4cf9-b276-525da51e498d-service-ca-bundle\") pod \"router-default-5444994796-l2f4x\" (UID: 
\"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788758 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d956a66-02c1-42f4-8b84-0772796d4ff5-auth-proxy-config\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788775 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e75ba71d-5bb5-435f-b450-d47523b91d73-audit-dir\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788789 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-policies\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788807 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788823 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-client-ca\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788838 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f2943f5f-caac-4566-b42e-5ef273488f2f-console-oauth-config\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788873 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-config\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788888 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 
14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788913 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-oauth-serving-cert\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788941 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-config\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788958 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-service-ca-bundle\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.788973 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789684 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789710 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-console-config\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789729 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-client\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789752 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-image-import-ca\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789771 4717 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789789 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3c15c60-2dce-4e54-9319-99e143d330c0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789806 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/472afeb7-afee-4a47-ae7f-2c879d9d1dac-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789824 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-config\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789924 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b3f6b3b-4788-41a8-ad23-71d0595da58e-config\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789954 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8tnb\" (UniqueName: \"kubernetes.io/projected/41b77629-2976-425f-b71c-a7f2e9686f11-kube-api-access-h8tnb\") pod \"downloads-7954f5f757-sf5xk\" (UID: \"41b77629-2976-425f-b71c-a7f2e9686f11\") " pod="openshift-console/downloads-7954f5f757-sf5xk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.789976 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhpd2\" (UniqueName: \"kubernetes.io/projected/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-kube-api-access-vhpd2\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790003 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-etcd-serving-ca\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790033 4717 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwszn\" (UniqueName: \"kubernetes.io/projected/da43c1f8-cc18-485d-ba7e-f8761d29584c-kube-api-access-cwszn\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790057 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chlxm\" (UniqueName: \"kubernetes.io/projected/4d800ae3-6269-4dd6-8c80-1f00153084e4-kube-api-access-chlxm\") pod \"migrator-59844c95c7-pwgz6\" (UID: \"4d800ae3-6269-4dd6-8c80-1f00153084e4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790110 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwx44\" (UniqueName: \"kubernetes.io/projected/d7c8b49f-9023-4cf9-b276-525da51e498d-kube-api-access-vwx44\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790162 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-metrics-certs\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790214 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-dir\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: E1002 14:23:16.790247 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:17.290216546 +0000 UTC m=+148.142071172 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790317 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bb2n\" (UniqueName: \"kubernetes.io/projected/1b3f6b3b-4788-41a8-ad23-71d0595da58e-kube-api-access-4bb2n\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790378 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxwn5\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-kube-api-access-zxwn5\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790412 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-encryption-config\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790437 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da43c1f8-cc18-485d-ba7e-f8761d29584c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790478 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-config\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790520 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-default-certificate\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790551 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktwmf\" (UniqueName: \"kubernetes.io/projected/8d956a66-02c1-42f4-8b84-0772796d4ff5-kube-api-access-ktwmf\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 
14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790586 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790619 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790692 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ab766803-fd23-476d-a273-ddf3c6dd237b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790739 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-ca\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790781 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/61253815-47e3-4e2c-a2e7-565f128dedef-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790823 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dncvl\" (UniqueName: \"kubernetes.io/projected/e75ba71d-5bb5-435f-b450-d47523b91d73-kube-api-access-dncvl\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790879 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-registry-certificates\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790920 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gllp\" (UniqueName: \"kubernetes.io/projected/400b985e-f209-45b1-afa7-6904803111e5-kube-api-access-8gllp\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790965 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f7796d9-6e2b-4053-8f62-096fc143c1be-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.790997 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/61253815-47e3-4e2c-a2e7-565f128dedef-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791020 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-serving-cert\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791045 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f7796d9-6e2b-4053-8f62-096fc143c1be-config\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791096 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791143 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791174 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791203 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ab766803-fd23-476d-a273-ddf3c6dd237b-serving-cert\") pod 
\"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791233 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d956a66-02c1-42f4-8b84-0772796d4ff5-config\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791282 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791303 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b68jw\" (UniqueName: \"kubernetes.io/projected/472afeb7-afee-4a47-ae7f-2c879d9d1dac-kube-api-access-b68jw\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791349 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/da43c1f8-cc18-485d-ba7e-f8761d29584c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791383 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-trusted-ca\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791405 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e75ba71d-5bb5-435f-b450-d47523b91d73-node-pullsecrets\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791425 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-audit\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791445 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791471 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppsk8\" (UniqueName: \"kubernetes.io/projected/b280459a-455a-402b-8f80-48b3d1e4588e-kube-api-access-ppsk8\") pod \"dns-operator-744455d44c-xwrtk\" (UID: \"b280459a-455a-402b-8f80-48b3d1e4588e\") " pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791493 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f7796d9-6e2b-4053-8f62-096fc143c1be-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791537 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-service-ca\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791562 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-stats-auth\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791596 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3c15c60-2dce-4e54-9319-99e143d330c0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791618 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aed4d244-0337-468e-a9d5-d9b0ea805a41-serving-cert\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791652 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2943f5f-caac-4566-b42e-5ef273488f2f-console-serving-cert\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791685 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-service-ca\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791709 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf6zg\" (UniqueName: \"kubernetes.io/projected/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-kube-api-access-wf6zg\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791732 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-registry-tls\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791757 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791801 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791827 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp5b8\" (UniqueName: \"kubernetes.io/projected/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-kube-api-access-vp5b8\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791850 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b280459a-455a-402b-8f80-48b3d1e4588e-metrics-tls\") pod \"dns-operator-744455d44c-xwrtk\" (UID: \"b280459a-455a-402b-8f80-48b3d1e4588e\") " pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791873 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlpgd\" (UniqueName: \"kubernetes.io/projected/aed4d244-0337-468e-a9d5-d9b0ea805a41-kube-api-access-xlpgd\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791900 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpztb\" 
(UniqueName: \"kubernetes.io/projected/f2943f5f-caac-4566-b42e-5ef273488f2f-kube-api-access-fpztb\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791921 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8d956a66-02c1-42f4-8b84-0772796d4ff5-machine-approver-tls\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.791986 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.792017 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcz2h\" (UniqueName: \"kubernetes.io/projected/c3c15c60-2dce-4e54-9319-99e143d330c0-kube-api-access-zcz2h\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.868143 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893047 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893251 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-service-ca\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: E1002 14:23:16.893315 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:17.393247026 +0000 UTC m=+148.245101622 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893378 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-stats-auth\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893428 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f7796d9-6e2b-4053-8f62-096fc143c1be-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893456 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t2l8\" (UniqueName: \"kubernetes.io/projected/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-kube-api-access-6t2l8\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893482 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2943f5f-caac-4566-b42e-5ef273488f2f-console-serving-cert\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893501 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf6zg\" (UniqueName: \"kubernetes.io/projected/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-kube-api-access-wf6zg\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893522 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3c15c60-2dce-4e54-9319-99e143d330c0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893540 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aed4d244-0337-468e-a9d5-d9b0ea805a41-serving-cert\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893559 4717 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-registry-tls\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893585 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a1756fd2-89da-4978-9a32-2201fbff47ee-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zr8w6\" (UID: \"a1756fd2-89da-4978-9a32-2201fbff47ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893626 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpztb\" (UniqueName: \"kubernetes.io/projected/f2943f5f-caac-4566-b42e-5ef273488f2f-kube-api-access-fpztb\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893649 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcz2h\" (UniqueName: \"kubernetes.io/projected/c3c15c60-2dce-4e54-9319-99e143d330c0-kube-api-access-zcz2h\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893674 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlpgd\" (UniqueName: \"kubernetes.io/projected/aed4d244-0337-468e-a9d5-d9b0ea805a41-kube-api-access-xlpgd\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893699 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq2tg\" (UniqueName: \"kubernetes.io/projected/a37f04f8-452e-4e02-8604-11e976bb5803-kube-api-access-sq2tg\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893726 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-apiservice-cert\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893747 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsgkd\" (UniqueName: \"kubernetes.io/projected/a28c9a08-8ab6-4fa6-bb25-607170b0b934-kube-api-access-tsgkd\") pod \"ingress-canary-89nf8\" (UID: \"a28c9a08-8ab6-4fa6-bb25-607170b0b934\") " pod="openshift-ingress-canary/ingress-canary-89nf8" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893765 4717 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb1a4ea4-8988-4648-9359-21eca40a8b29-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893781 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb1a4ea4-8988-4648-9359-21eca40a8b29-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893812 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-config\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893835 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/14962662-bec2-4616-b950-69bea84d99d0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: \"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893853 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fpbl\" (UniqueName: \"kubernetes.io/projected/33230d31-2ca9-424d-9a04-5e8ab8c04663-kube-api-access-9fpbl\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893869 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/58abdd37-1588-4dce-b4b8-bfa17b906ca4-signing-key\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893884 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/58abdd37-1588-4dce-b4b8-bfa17b906ca4-signing-cabundle\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893907 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/472afeb7-afee-4a47-ae7f-2c879d9d1dac-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 
14:23:16.893926 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6skkq\" (UniqueName: \"kubernetes.io/projected/4d0c57e4-d5a7-4d9b-adfa-61c2840724b3-kube-api-access-6skkq\") pod \"package-server-manager-789f6589d5-5tf5r\" (UID: \"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893964 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-serving-cert\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.893983 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-plugins-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894006 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-bound-sa-token\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894025 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894045 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqqvd\" (UniqueName: \"kubernetes.io/projected/6fbbc834-46e7-4024-9a5e-9602f4f98138-kube-api-access-hqqvd\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894068 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a37f04f8-452e-4e02-8604-11e976bb5803-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894088 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d956a66-02c1-42f4-8b84-0772796d4ff5-auth-proxy-config\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894110 4717 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1b3f6b3b-4788-41a8-ad23-71d0595da58e-trusted-ca\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894127 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-policies\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894146 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-client-ca\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894167 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkxs8\" (UniqueName: \"kubernetes.io/projected/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-kube-api-access-wkxs8\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894190 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f2943f5f-caac-4566-b42e-5ef273488f2f-console-oauth-config\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894210 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-config\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894231 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/33230d31-2ca9-424d-9a04-5e8ab8c04663-node-bootstrap-token\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894250 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-config\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894267 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brc2k\" (UniqueName: \"kubernetes.io/projected/14962662-bec2-4616-b950-69bea84d99d0-kube-api-access-brc2k\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: 
\"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894292 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894311 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-console-config\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894332 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-tmpfs\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894350 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-image-import-ca\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894369 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3c15c60-2dce-4e54-9319-99e143d330c0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894387 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/472afeb7-afee-4a47-ae7f-2c879d9d1dac-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894406 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5fhp\" (UniqueName: \"kubernetes.io/projected/75ea49ac-bda7-4812-8107-d79902f26b2c-kube-api-access-p5fhp\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894428 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b3f6b3b-4788-41a8-ad23-71d0595da58e-config\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " 
pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894446 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8tnb\" (UniqueName: \"kubernetes.io/projected/41b77629-2976-425f-b71c-a7f2e9686f11-kube-api-access-h8tnb\") pod \"downloads-7954f5f757-sf5xk\" (UID: \"41b77629-2976-425f-b71c-a7f2e9686f11\") " pod="openshift-console/downloads-7954f5f757-sf5xk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894465 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-etcd-serving-ca\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894485 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwx44\" (UniqueName: \"kubernetes.io/projected/d7c8b49f-9023-4cf9-b276-525da51e498d-kube-api-access-vwx44\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894506 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7hcl\" (UniqueName: \"kubernetes.io/projected/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-kube-api-access-r7hcl\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894522 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb1a4ea4-8988-4648-9359-21eca40a8b29-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894542 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/75ea49ac-bda7-4812-8107-d79902f26b2c-srv-cert\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894568 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-metrics-certs\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894589 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-csi-data-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894607 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4d0c57e4-d5a7-4d9b-adfa-61c2840724b3-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5tf5r\" (UID: \"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894624 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/14962662-bec2-4616-b950-69bea84d99d0-proxy-tls\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: \"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894651 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjph4\" (UniqueName: \"kubernetes.io/projected/7d5177ac-fd1e-4f0a-822a-cb329a91829a-kube-api-access-tjph4\") pod \"multus-admission-controller-857f4d67dd-x9gbl\" (UID: \"7d5177ac-fd1e-4f0a-822a-cb329a91829a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894678 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/75ea49ac-bda7-4812-8107-d79902f26b2c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894682 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3c15c60-2dce-4e54-9319-99e143d330c0-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894698 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da43c1f8-cc18-485d-ba7e-f8761d29584c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894718 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-metrics-tls\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894742 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-ca\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894768 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894790 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad9781fa-330d-4741-b182-0fdf0d1c394d-secret-volume\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894814 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dncvl\" (UniqueName: \"kubernetes.io/projected/e75ba71d-5bb5-435f-b450-d47523b91d73-kube-api-access-dncvl\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894833 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/61253815-47e3-4e2c-a2e7-565f128dedef-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894849 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-serving-cert\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894867 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894885 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwwb8\" (UniqueName: \"kubernetes.io/projected/58abdd37-1588-4dce-b4b8-bfa17b906ca4-kube-api-access-wwwb8\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894915 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ab766803-fd23-476d-a273-ddf3c6dd237b-serving-cert\") pod \"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894952 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e75ba71d-5bb5-435f-b450-d47523b91d73-node-pullsecrets\") pod 
\"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894973 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.894994 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct8cd\" (UniqueName: \"kubernetes.io/projected/a1756fd2-89da-4978-9a32-2201fbff47ee-kube-api-access-ct8cd\") pod \"control-plane-machine-set-operator-78cbb6b69f-zr8w6\" (UID: \"a1756fd2-89da-4978-9a32-2201fbff47ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895011 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a37f04f8-452e-4e02-8604-11e976bb5803-proxy-tls\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895030 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppsk8\" (UniqueName: \"kubernetes.io/projected/b280459a-455a-402b-8f80-48b3d1e4588e-kube-api-access-ppsk8\") pod \"dns-operator-744455d44c-xwrtk\" (UID: \"b280459a-455a-402b-8f80-48b3d1e4588e\") " pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895049 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-service-ca\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895068 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895087 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp5b8\" (UniqueName: \"kubernetes.io/projected/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-kube-api-access-vp5b8\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895116 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895133 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6fbbc834-46e7-4024-9a5e-9602f4f98138-profile-collector-cert\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895152 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/33230d31-2ca9-424d-9a04-5e8ab8c04663-certs\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895173 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8d956a66-02c1-42f4-8b84-0772796d4ff5-machine-approver-tls\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895193 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895212 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b280459a-455a-402b-8f80-48b3d1e4588e-metrics-tls\") pod \"dns-operator-744455d44c-xwrtk\" (UID: \"b280459a-455a-402b-8f80-48b3d1e4588e\") " pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895233 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-serving-cert\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895250 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-config\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.895272 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-trusted-ca\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:16 crc 
kubenswrapper[4717]: I1002 14:23:16.895332 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-policies\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.896306 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/61253815-47e3-4e2c-a2e7-565f128dedef-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.896519 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/472afeb7-afee-4a47-ae7f-2c879d9d1dac-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:16 crc kubenswrapper[4717]: E1002 14:23:16.896672 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:17.396653127 +0000 UTC m=+148.248507733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.896782 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-client-ca\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.896814 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d956a66-02c1-42f4-8b84-0772796d4ff5-auth-proxy-config\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.896926 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-service-ca\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.897269 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-service-ca\") pod 
\"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.897387 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1b3f6b3b-4788-41a8-ad23-71d0595da58e-trusted-ca\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.898542 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.898710 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-stats-auth\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.899439 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.899706 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da43c1f8-cc18-485d-ba7e-f8761d29584c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.899922 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-console-config\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.900720 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.901876 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-etcd-serving-ca\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.902390 4717 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aed4d244-0337-468e-a9d5-d9b0ea805a41-serving-cert\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.902506 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/472afeb7-afee-4a47-ae7f-2c879d9d1dac-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.902695 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-image-import-ca\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.903145 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-config\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.903291 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-trusted-ca-bundle\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.903382 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f2943f5f-caac-4566-b42e-5ef273488f2f-console-oauth-config\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.904059 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.904095 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-config\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.904551 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-ca\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.904683 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e75ba71d-5bb5-435f-b450-d47523b91d73-node-pullsecrets\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.904854 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f2943f5f-caac-4566-b42e-5ef273488f2f-console-serving-cert\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905351 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b3f6b3b-4788-41a8-ad23-71d0595da58e-config\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905424 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/400b985e-f209-45b1-afa7-6904803111e5-serving-cert\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905461 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7d5177ac-fd1e-4f0a-822a-cb329a91829a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-x9gbl\" (UID: \"7d5177ac-fd1e-4f0a-822a-cb329a91829a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905488 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-trusted-ca-bundle\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905517 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-serving-cert\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905551 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrjtl\" (UniqueName: \"kubernetes.io/projected/ab766803-fd23-476d-a273-ddf3c6dd237b-kube-api-access-jrjtl\") pod \"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905575 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905598 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpk6x\" (UniqueName: \"kubernetes.io/projected/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-kube-api-access-fpk6x\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905674 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-config\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905745 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-etcd-client\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.906269 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3c15c60-2dce-4e54-9319-99e143d330c0-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.906456 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-trusted-ca-bundle\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.906727 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.906764 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/8d956a66-02c1-42f4-8b84-0772796d4ff5-machine-approver-tls\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.905790 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgqxl\" (UniqueName: \"kubernetes.io/projected/ad9781fa-330d-4741-b182-0fdf0d1c394d-kube-api-access-rgqxl\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.906980 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a37f04f8-452e-4e02-8604-11e976bb5803-images\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907036 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907072 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da43c1f8-cc18-485d-ba7e-f8761d29584c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907211 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-mountpoint-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907250 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907299 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7c8b49f-9023-4cf9-b276-525da51e498d-service-ca-bundle\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907367 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e75ba71d-5bb5-435f-b450-d47523b91d73-audit-dir\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907395 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b3f6b3b-4788-41a8-ad23-71d0595da58e-serving-cert\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907442 4717 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.907800 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ab766803-fd23-476d-a273-ddf3c6dd237b-serving-cert\") pod \"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.908100 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.908145 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e75ba71d-5bb5-435f-b450-d47523b91d73-audit-dir\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.908679 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d7c8b49f-9023-4cf9-b276-525da51e498d-service-ca-bundle\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.908712 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.908760 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.908804 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/aa4a4f1e-edd6-430a-900d-ce172449b50b-metrics-tls\") pod \"dns-default-xcjdc\" (UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909164 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f92km\" (UniqueName: \"kubernetes.io/projected/aa4a4f1e-edd6-430a-900d-ce172449b50b-kube-api-access-f92km\") pod \"dns-default-xcjdc\" 
(UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909234 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-oauth-serving-cert\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909268 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-service-ca-bundle\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909357 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909392 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909419 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q75n\" (UniqueName: \"kubernetes.io/projected/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-kube-api-access-6q75n\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909491 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-serving-cert\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909907 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-service-ca-bundle\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.909987 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-client\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.910021 4717 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6fbbc834-46e7-4024-9a5e-9602f4f98138-srv-cert\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.910083 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.910116 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-config\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.910144 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa4a4f1e-edd6-430a-900d-ce172449b50b-config-volume\") pod \"dns-default-xcjdc\" (UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.912227 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f7796d9-6e2b-4053-8f62-096fc143c1be-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.912276 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhpd2\" (UniqueName: \"kubernetes.io/projected/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-kube-api-access-vhpd2\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.912383 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwszn\" (UniqueName: \"kubernetes.io/projected/da43c1f8-cc18-485d-ba7e-f8761d29584c-kube-api-access-cwszn\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.912503 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chlxm\" (UniqueName: \"kubernetes.io/projected/4d800ae3-6269-4dd6-8c80-1f00153084e4-kube-api-access-chlxm\") pod \"migrator-59844c95c7-pwgz6\" (UID: \"4d800ae3-6269-4dd6-8c80-1f00153084e4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.912541 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-registration-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.912813 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-dir\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913102 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913224 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxwn5\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-kube-api-access-zxwn5\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913271 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-config\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913315 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-encryption-config\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913356 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bb2n\" (UniqueName: \"kubernetes.io/projected/1b3f6b3b-4788-41a8-ad23-71d0595da58e-kube-api-access-4bb2n\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913472 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-dir\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913720 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-default-certificate\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 
crc kubenswrapper[4717]: I1002 14:23:16.913782 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktwmf\" (UniqueName: \"kubernetes.io/projected/8d956a66-02c1-42f4-8b84-0772796d4ff5-kube-api-access-ktwmf\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913804 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-registry-tls\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913828 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.913807 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-config\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914008 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ab766803-fd23-476d-a273-ddf3c6dd237b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914046 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914113 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-socket-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914147 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-webhook-cert\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914191 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-registry-certificates\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914220 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/61253815-47e3-4e2c-a2e7-565f128dedef-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914243 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gllp\" (UniqueName: \"kubernetes.io/projected/400b985e-f209-45b1-afa7-6904803111e5-kube-api-access-8gllp\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914312 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f7796d9-6e2b-4053-8f62-096fc143c1be-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914348 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914367 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914388 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f7796d9-6e2b-4053-8f62-096fc143c1be-config\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914409 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d956a66-02c1-42f4-8b84-0772796d4ff5-config\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914425 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914515 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b68jw\" (UniqueName: \"kubernetes.io/projected/472afeb7-afee-4a47-ae7f-2c879d9d1dac-kube-api-access-b68jw\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914534 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/400b985e-f209-45b1-afa7-6904803111e5-config\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914542 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914570 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad9781fa-330d-4741-b182-0fdf0d1c394d-config-volume\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914602 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/da43c1f8-cc18-485d-ba7e-f8761d29584c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914626 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a28c9a08-8ab6-4fa6-bb25-607170b0b934-cert\") pod \"ingress-canary-89nf8\" (UID: \"a28c9a08-8ab6-4fa6-bb25-607170b0b934\") " pod="openshift-ingress-canary/ingress-canary-89nf8" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914663 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-trusted-ca\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914687 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-audit\") pod \"apiserver-76f77b778f-cqd52\" 
(UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.914843 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1b3f6b3b-4788-41a8-ad23-71d0595da58e-serving-cert\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.915213 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-etcd-client\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.915331 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/e75ba71d-5bb5-435f-b450-d47523b91d73-audit\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.916423 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/400b985e-f209-45b1-afa7-6904803111e5-serving-cert\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.916772 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-metrics-certs\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.916774 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ab766803-fd23-476d-a273-ddf3c6dd237b-available-featuregates\") pod \"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.918084 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d956a66-02c1-42f4-8b84-0772796d4ff5-config\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.918976 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f7796d9-6e2b-4053-8f62-096fc143c1be-config\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.919042 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-registry-certificates\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.919354 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.919592 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-trusted-ca\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.919697 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b280459a-455a-402b-8f80-48b3d1e4588e-metrics-tls\") pod \"dns-operator-744455d44c-xwrtk\" (UID: \"b280459a-455a-402b-8f80-48b3d1e4588e\") " pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.920376 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f2943f5f-caac-4566-b42e-5ef273488f2f-oauth-serving-cert\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.922294 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.922530 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-encryption-config\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.923090 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.923294 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc 
kubenswrapper[4717]: I1002 14:23:16.923544 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e75ba71d-5bb5-435f-b450-d47523b91d73-etcd-client\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.925360 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/da43c1f8-cc18-485d-ba7e-f8761d29584c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.925813 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d7c8b49f-9023-4cf9-b276-525da51e498d-default-certificate\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.925917 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/61253815-47e3-4e2c-a2e7-565f128dedef-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.926255 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.926558 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.945552 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf6zg\" (UniqueName: \"kubernetes.io/projected/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-kube-api-access-wf6zg\") pod \"oauth-openshift-558db77b4-qmztn\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.952110 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gkzjm"] Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.963308 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-bound-sa-token\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:16 crc 
kubenswrapper[4717]: I1002 14:23:16.981856 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dncvl\" (UniqueName: \"kubernetes.io/projected/e75ba71d-5bb5-435f-b450-d47523b91d73-kube-api-access-dncvl\") pod \"apiserver-76f77b778f-cqd52\" (UID: \"e75ba71d-5bb5-435f-b450-d47523b91d73\") " pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:16 crc kubenswrapper[4717]: I1002 14:23:16.992127 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.004256 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f614f493-81ac-4fa9-bb29-ee9fd24e8d5f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9gcmm\" (UID: \"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.015380 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.015603 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-plugins-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.015628 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkxs8\" (UniqueName: \"kubernetes.io/projected/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-kube-api-access-wkxs8\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.015648 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqqvd\" (UniqueName: \"kubernetes.io/projected/6fbbc834-46e7-4024-9a5e-9602f4f98138-kube-api-access-hqqvd\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.015674 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a37f04f8-452e-4e02-8604-11e976bb5803-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.015695 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/33230d31-2ca9-424d-9a04-5e8ab8c04663-node-bootstrap-token\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:17 
crc kubenswrapper[4717]: I1002 14:23:17.015715 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brc2k\" (UniqueName: \"kubernetes.io/projected/14962662-bec2-4616-b950-69bea84d99d0-kube-api-access-brc2k\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: \"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.016010 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:17.515974552 +0000 UTC m=+148.367828988 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016298 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-tmpfs\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016330 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5fhp\" (UniqueName: \"kubernetes.io/projected/75ea49ac-bda7-4812-8107-d79902f26b2c-kube-api-access-p5fhp\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016374 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7hcl\" (UniqueName: \"kubernetes.io/projected/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-kube-api-access-r7hcl\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016394 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb1a4ea4-8988-4648-9359-21eca40a8b29-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016398 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-plugins-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016419 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: 
\"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-csi-data-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016561 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/75ea49ac-bda7-4812-8107-d79902f26b2c-srv-cert\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016596 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4d0c57e4-d5a7-4d9b-adfa-61c2840724b3-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5tf5r\" (UID: \"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016622 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/14962662-bec2-4616-b950-69bea84d99d0-proxy-tls\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: \"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016632 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-csi-data-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.016644 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjph4\" (UniqueName: \"kubernetes.io/projected/7d5177ac-fd1e-4f0a-822a-cb329a91829a-kube-api-access-tjph4\") pod \"multus-admission-controller-857f4d67dd-x9gbl\" (UID: \"7d5177ac-fd1e-4f0a-822a-cb329a91829a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.017194 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-tmpfs\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.017611 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/a37f04f8-452e-4e02-8604-11e976bb5803-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.017773 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/75ea49ac-bda7-4812-8107-d79902f26b2c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.017805 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-metrics-tls\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.017870 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad9781fa-330d-4741-b182-0fdf0d1c394d-secret-volume\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.017922 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwwb8\" (UniqueName: \"kubernetes.io/projected/58abdd37-1588-4dce-b4b8-bfa17b906ca4-kube-api-access-wwwb8\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.019448 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-d6qrw"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.020319 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct8cd\" (UniqueName: \"kubernetes.io/projected/a1756fd2-89da-4978-9a32-2201fbff47ee-kube-api-access-ct8cd\") pod \"control-plane-machine-set-operator-78cbb6b69f-zr8w6\" (UID: \"a1756fd2-89da-4978-9a32-2201fbff47ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.020464 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a37f04f8-452e-4e02-8604-11e976bb5803-proxy-tls\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021607 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6fbbc834-46e7-4024-9a5e-9602f4f98138-profile-collector-cert\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021630 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/33230d31-2ca9-424d-9a04-5e8ab8c04663-certs\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021659 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-serving-cert\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021683 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-config\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021712 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-trusted-ca\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021737 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7d5177ac-fd1e-4f0a-822a-cb329a91829a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-x9gbl\" (UID: \"7d5177ac-fd1e-4f0a-822a-cb329a91829a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021796 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpk6x\" (UniqueName: \"kubernetes.io/projected/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-kube-api-access-fpk6x\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021815 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a37f04f8-452e-4e02-8604-11e976bb5803-images\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021839 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgqxl\" (UniqueName: \"kubernetes.io/projected/ad9781fa-330d-4741-b182-0fdf0d1c394d-kube-api-access-rgqxl\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021878 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021911 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-mountpoint-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.021965 4717 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/aa4a4f1e-edd6-430a-900d-ce172449b50b-metrics-tls\") pod \"dns-default-xcjdc\" (UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022005 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f92km\" (UniqueName: \"kubernetes.io/projected/aa4a4f1e-edd6-430a-900d-ce172449b50b-kube-api-access-f92km\") pod \"dns-default-xcjdc\" (UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022050 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022072 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q75n\" (UniqueName: \"kubernetes.io/projected/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-kube-api-access-6q75n\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022120 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6fbbc834-46e7-4024-9a5e-9602f4f98138-srv-cert\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022143 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-registration-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022158 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa4a4f1e-edd6-430a-900d-ce172449b50b-config-volume\") pod \"dns-default-xcjdc\" (UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022287 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-webhook-cert\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022311 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-socket-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022362 
4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a28c9a08-8ab6-4fa6-bb25-607170b0b934-cert\") pod \"ingress-canary-89nf8\" (UID: \"a28c9a08-8ab6-4fa6-bb25-607170b0b934\") " pod="openshift-ingress-canary/ingress-canary-89nf8" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022377 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022393 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad9781fa-330d-4741-b182-0fdf0d1c394d-config-volume\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022427 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t2l8\" (UniqueName: \"kubernetes.io/projected/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-kube-api-access-6t2l8\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022465 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a1756fd2-89da-4978-9a32-2201fbff47ee-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zr8w6\" (UID: \"a1756fd2-89da-4978-9a32-2201fbff47ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022683 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-registration-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.022870 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb1a4ea4-8988-4648-9359-21eca40a8b29-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023065 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq2tg\" (UniqueName: \"kubernetes.io/projected/a37f04f8-452e-4e02-8604-11e976bb5803-kube-api-access-sq2tg\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023091 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-apiservice-cert\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023118 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsgkd\" (UniqueName: \"kubernetes.io/projected/a28c9a08-8ab6-4fa6-bb25-607170b0b934-kube-api-access-tsgkd\") pod \"ingress-canary-89nf8\" (UID: \"a28c9a08-8ab6-4fa6-bb25-607170b0b934\") " pod="openshift-ingress-canary/ingress-canary-89nf8" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023139 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/58abdd37-1588-4dce-b4b8-bfa17b906ca4-signing-cabundle\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023163 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb1a4ea4-8988-4648-9359-21eca40a8b29-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023185 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb1a4ea4-8988-4648-9359-21eca40a8b29-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023210 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/14962662-bec2-4616-b950-69bea84d99d0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: \"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023232 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fpbl\" (UniqueName: \"kubernetes.io/projected/33230d31-2ca9-424d-9a04-5e8ab8c04663-kube-api-access-9fpbl\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023248 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/58abdd37-1588-4dce-b4b8-bfa17b906ca4-signing-key\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023269 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6skkq\" (UniqueName: \"kubernetes.io/projected/4d0c57e4-d5a7-4d9b-adfa-61c2840724b3-kube-api-access-6skkq\") pod \"package-server-manager-789f6589d5-5tf5r\" (UID: 
\"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023794 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.023975 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-mountpoint-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.024577 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad9781fa-330d-4741-b182-0fdf0d1c394d-config-volume\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.024841 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4d0c57e4-d5a7-4d9b-adfa-61c2840724b3-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5tf5r\" (UID: \"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.024916 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-socket-dir\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.025296 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/75ea49ac-bda7-4812-8107-d79902f26b2c-srv-cert\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.028056 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad9781fa-330d-4741-b182-0fdf0d1c394d-secret-volume\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.028302 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-trusted-ca\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.028310 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/aa4a4f1e-edd6-430a-900d-ce172449b50b-config-volume\") pod \"dns-default-xcjdc\" (UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.028373 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-config\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.028539 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/58abdd37-1588-4dce-b4b8-bfa17b906ca4-signing-cabundle\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.028652 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a37f04f8-452e-4e02-8604-11e976bb5803-images\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.028980 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/14962662-bec2-4616-b950-69bea84d99d0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: \"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.029957 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.030857 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6fbbc834-46e7-4024-9a5e-9602f4f98138-profile-collector-cert\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.031688 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-apiservice-cert\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.035146 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb1a4ea4-8988-4648-9359-21eca40a8b29-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.036598 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/58abdd37-1588-4dce-b4b8-bfa17b906ca4-signing-key\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.036911 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/33230d31-2ca9-424d-9a04-5e8ab8c04663-certs\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.037411 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a28c9a08-8ab6-4fa6-bb25-607170b0b934-cert\") pod \"ingress-canary-89nf8\" (UID: \"a28c9a08-8ab6-4fa6-bb25-607170b0b934\") " pod="openshift-ingress-canary/ingress-canary-89nf8" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.038177 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/a37f04f8-452e-4e02-8604-11e976bb5803-proxy-tls\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.039413 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-webhook-cert\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.039449 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/a1756fd2-89da-4978-9a32-2201fbff47ee-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zr8w6\" (UID: \"a1756fd2-89da-4978-9a32-2201fbff47ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.039889 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7d5177ac-fd1e-4f0a-822a-cb329a91829a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-x9gbl\" (UID: \"7d5177ac-fd1e-4f0a-822a-cb329a91829a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.040696 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/33230d31-2ca9-424d-9a04-5e8ab8c04663-node-bootstrap-token\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.041262 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/6fbbc834-46e7-4024-9a5e-9602f4f98138-srv-cert\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.041319 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/aa4a4f1e-edd6-430a-900d-ce172449b50b-metrics-tls\") pod \"dns-default-xcjdc\" (UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.041326 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/14962662-bec2-4616-b950-69bea84d99d0-proxy-tls\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: \"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.042151 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/75ea49ac-bda7-4812-8107-d79902f26b2c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.042231 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-metrics-tls\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.042246 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpztb\" (UniqueName: \"kubernetes.io/projected/f2943f5f-caac-4566-b42e-5ef273488f2f-kube-api-access-fpztb\") pod \"console-f9d7485db-r4gdr\" (UID: \"f2943f5f-caac-4566-b42e-5ef273488f2f\") " pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.050157 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.051906 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-serving-cert\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.060819 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppsk8\" (UniqueName: \"kubernetes.io/projected/b280459a-455a-402b-8f80-48b3d1e4588e-kube-api-access-ppsk8\") pod \"dns-operator-744455d44c-xwrtk\" (UID: \"b280459a-455a-402b-8f80-48b3d1e4588e\") " pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.061986 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcz2h\" (UniqueName: \"kubernetes.io/projected/c3c15c60-2dce-4e54-9319-99e143d330c0-kube-api-access-zcz2h\") pod \"openshift-controller-manager-operator-756b6f6bc6-4nwjx\" (UID: \"c3c15c60-2dce-4e54-9319-99e143d330c0\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.085029 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwx44\" (UniqueName: \"kubernetes.io/projected/d7c8b49f-9023-4cf9-b276-525da51e498d-kube-api-access-vwx44\") pod \"router-default-5444994796-l2f4x\" (UID: \"d7c8b49f-9023-4cf9-b276-525da51e498d\") " pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.106205 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp5b8\" (UniqueName: \"kubernetes.io/projected/548b2b20-6dc1-4d5e-a3be-33d278e22b7c-kube-api-access-vp5b8\") pod \"openshift-apiserver-operator-796bbdcf4f-f4cx4\" (UID: \"548b2b20-6dc1-4d5e-a3be-33d278e22b7c\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.106703 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.119200 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8tnb\" (UniqueName: \"kubernetes.io/projected/41b77629-2976-425f-b71c-a7f2e9686f11-kube-api-access-h8tnb\") pod \"downloads-7954f5f757-sf5xk\" (UID: \"41b77629-2976-425f-b71c-a7f2e9686f11\") " pod="openshift-console/downloads-7954f5f757-sf5xk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.125016 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.125431 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-02 14:23:17.625419403 +0000 UTC m=+148.477273849 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.136574 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.142756 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlpgd\" (UniqueName: \"kubernetes.io/projected/aed4d244-0337-468e-a9d5-d9b0ea805a41-kube-api-access-xlpgd\") pod \"route-controller-manager-6576b87f9c-5zkkr\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.146855 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.156621 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.162953 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrjtl\" (UniqueName: \"kubernetes.io/projected/ab766803-fd23-476d-a273-ddf3c6dd237b-kube-api-access-jrjtl\") pod \"openshift-config-operator-7777fb866f-wn65n\" (UID: \"ab766803-fd23-476d-a273-ddf3c6dd237b\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.189643 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da43c1f8-cc18-485d-ba7e-f8761d29584c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:17 crc kubenswrapper[4717]: W1002 14:23:17.198226 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd7c8b49f_9023_4cf9_b276_525da51e498d.slice/crio-475330cf987587b23b614f9d64a7d9ac6fe460432c387f323b4f3f8b0d83e9f2 WatchSource:0}: Error finding container 475330cf987587b23b614f9d64a7d9ac6fe460432c387f323b4f3f8b0d83e9f2: Status 404 returned error can't find the container with id 475330cf987587b23b614f9d64a7d9ac6fe460432c387f323b4f3f8b0d83e9f2 Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.200174 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhpd2\" (UniqueName: \"kubernetes.io/projected/afb1dfea-fc57-4b25-a65b-0bc13a7c861a-kube-api-access-vhpd2\") pod \"etcd-operator-b45778765-n9m2d\" (UID: \"afb1dfea-fc57-4b25-a65b-0bc13a7c861a\") " pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.215128 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.224848 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwszn\" (UniqueName: \"kubernetes.io/projected/da43c1f8-cc18-485d-ba7e-f8761d29584c-kube-api-access-cwszn\") pod \"cluster-image-registry-operator-dc59b4c8b-vbgxl\" (UID: \"da43c1f8-cc18-485d-ba7e-f8761d29584c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.225491 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.225688 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:17.725669547 +0000 UTC m=+148.577523993 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.225793 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.226350 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:17.726339475 +0000 UTC m=+148.578193921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.229568 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.237097 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-sf5xk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.240019 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-cqd52"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.242489 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chlxm\" (UniqueName: \"kubernetes.io/projected/4d800ae3-6269-4dd6-8c80-1f00153084e4-kube-api-access-chlxm\") pod \"migrator-59844c95c7-pwgz6\" (UID: \"4d800ae3-6269-4dd6-8c80-1f00153084e4\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.247474 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.256363 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.259909 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bb2n\" (UniqueName: \"kubernetes.io/projected/1b3f6b3b-4788-41a8-ad23-71d0595da58e-kube-api-access-4bb2n\") pod \"console-operator-58897d9998-2z55s\" (UID: \"1b3f6b3b-4788-41a8-ad23-71d0595da58e\") " pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.284168 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f7796d9-6e2b-4053-8f62-096fc143c1be-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9jx64\" (UID: \"1f7796d9-6e2b-4053-8f62-096fc143c1be\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.289651 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.299415 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.311130 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.313150 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.327192 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.328126 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-02 14:23:17.82810595 +0000 UTC m=+148.679960396 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.329815 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktwmf\" (UniqueName: \"kubernetes.io/projected/8d956a66-02c1-42f4-8b84-0772796d4ff5-kube-api-access-ktwmf\") pod \"machine-approver-56656f9798-2vzcx\" (UID: \"8d956a66-02c1-42f4-8b84-0772796d4ff5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.341090 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gllp\" (UniqueName: \"kubernetes.io/projected/400b985e-f209-45b1-afa7-6904803111e5-kube-api-access-8gllp\") pod \"authentication-operator-69f744f599-h6pmp\" (UID: \"400b985e-f209-45b1-afa7-6904803111e5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.362209 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b68jw\" (UniqueName: \"kubernetes.io/projected/472afeb7-afee-4a47-ae7f-2c879d9d1dac-kube-api-access-b68jw\") pod \"kube-storage-version-migrator-operator-b67b599dd-s2bkr\" (UID: \"472afeb7-afee-4a47-ae7f-2c879d9d1dac\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.366244 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.379823 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.382726 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxwn5\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-kube-api-access-zxwn5\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.407298 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqqvd\" (UniqueName: \"kubernetes.io/projected/6fbbc834-46e7-4024-9a5e-9602f4f98138-kube-api-access-hqqvd\") pod \"catalog-operator-68c6474976-2cdqg\" (UID: \"6fbbc834-46e7-4024-9a5e-9602f4f98138\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.422070 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.424875 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qmztn"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.431446 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.432347 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:17.932324501 +0000 UTC m=+148.784178947 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.440596 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5fhp\" (UniqueName: \"kubernetes.io/projected/75ea49ac-bda7-4812-8107-d79902f26b2c-kube-api-access-p5fhp\") pod \"olm-operator-6b444d44fb-f8mgk\" (UID: \"75ea49ac-bda7-4812-8107-d79902f26b2c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.451942 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brc2k\" (UniqueName: \"kubernetes.io/projected/14962662-bec2-4616-b950-69bea84d99d0-kube-api-access-brc2k\") pod \"machine-config-controller-84d6567774-r5w5f\" (UID: \"14962662-bec2-4616-b950-69bea84d99d0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.468695 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7hcl\" (UniqueName: \"kubernetes.io/projected/b45535f7-1aaf-48bc-bd92-6ee1f56efe78-kube-api-access-r7hcl\") pod \"csi-hostpathplugin-4mg6p\" (UID: \"b45535f7-1aaf-48bc-bd92-6ee1f56efe78\") " pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: W1002 14:23:17.475838 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9329366a_9fdd_45dd_9c5d_6139f6cc64c2.slice/crio-820d6cda793f1830cde4ff089bb8ad30d05546dd9bd4c72deabb8b5f946d70d6 WatchSource:0}: Error finding container 820d6cda793f1830cde4ff089bb8ad30d05546dd9bd4c72deabb8b5f946d70d6: Status 404 returned error can't find the container with id 820d6cda793f1830cde4ff089bb8ad30d05546dd9bd4c72deabb8b5f946d70d6 Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.476584 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.506817 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.508722 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjph4\" (UniqueName: \"kubernetes.io/projected/7d5177ac-fd1e-4f0a-822a-cb329a91829a-kube-api-access-tjph4\") pod \"multus-admission-controller-857f4d67dd-x9gbl\" (UID: \"7d5177ac-fd1e-4f0a-822a-cb329a91829a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.509413 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkxs8\" (UniqueName: \"kubernetes.io/projected/b5768c1b-0a6c-4683-8793-bcba4d0d07d9-kube-api-access-wkxs8\") pod \"service-ca-operator-777779d784-xwvdw\" (UID: \"b5768c1b-0a6c-4683-8793-bcba4d0d07d9\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.523089 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.524085 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" event={"ID":"279ce48f-4f2a-407a-bec4-2e0752b71c2c","Type":"ContainerStarted","Data":"13ab486366ecee8463a6afd7ab2d53b6639a0bc8405f8f7252cb9fc467c60c07"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.530355 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwwb8\" (UniqueName: \"kubernetes.io/projected/58abdd37-1588-4dce-b4b8-bfa17b906ca4-kube-api-access-wwwb8\") pod \"service-ca-9c57cc56f-kwgnk\" (UID: \"58abdd37-1588-4dce-b4b8-bfa17b906ca4\") " pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.532108 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" event={"ID":"95017b18-3508-46ed-a3a7-a6834d5ada15","Type":"ContainerStarted","Data":"cc8bc0271c6649a75ec394edbb1e7f265113cdc4291cb7b735eaf9b0484351c6"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.532228 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" event={"ID":"95017b18-3508-46ed-a3a7-a6834d5ada15","Type":"ContainerStarted","Data":"1fdc3b5fe2d9cc67f27b78730b515ff255b3e3b1df9246c66143b84a8447e8ca"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.532325 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" event={"ID":"95017b18-3508-46ed-a3a7-a6834d5ada15","Type":"ContainerStarted","Data":"1b6c5974df64e8375859246feb9033c9d22c623bc43333fb1eeebb65d8569bb8"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.532709 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:17 
crc kubenswrapper[4717]: E1002 14:23:17.532887 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.032868013 +0000 UTC m=+148.884722459 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.533358 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.533706 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.033695215 +0000 UTC m=+148.885549671 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.544070 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct8cd\" (UniqueName: \"kubernetes.io/projected/a1756fd2-89da-4978-9a32-2201fbff47ee-kube-api-access-ct8cd\") pod \"control-plane-machine-set-operator-78cbb6b69f-zr8w6\" (UID: \"a1756fd2-89da-4978-9a32-2201fbff47ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.559501 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" event={"ID":"4ee2df25-8a54-4608-b82e-41edda414d2b","Type":"ContainerStarted","Data":"191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.559802 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" event={"ID":"4ee2df25-8a54-4608-b82e-41edda414d2b","Type":"ContainerStarted","Data":"7ddb797a4c90a33ebdb9948910e7a9de2b9fbbd95b3201575aa497483b76d2ad"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.561365 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.567237 4717 patch_prober.go:28] interesting 
pod/controller-manager-879f6c89f-gkzjm container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.567303 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" podUID="4ee2df25-8a54-4608-b82e-41edda414d2b" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.572750 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-l2f4x" event={"ID":"d7c8b49f-9023-4cf9-b276-525da51e498d","Type":"ContainerStarted","Data":"a1c29cdee0cbb8061b131074835a3d9f70b2816ffb09e54b0b134c21ace94035"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.572813 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-l2f4x" event={"ID":"d7c8b49f-9023-4cf9-b276-525da51e498d","Type":"ContainerStarted","Data":"475330cf987587b23b614f9d64a7d9ac6fe460432c387f323b4f3f8b0d83e9f2"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.575344 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgqxl\" (UniqueName: \"kubernetes.io/projected/ad9781fa-330d-4741-b182-0fdf0d1c394d-kube-api-access-rgqxl\") pod \"collect-profiles-29323575-wfd6r\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.592497 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f92km\" (UniqueName: \"kubernetes.io/projected/aa4a4f1e-edd6-430a-900d-ce172449b50b-kube-api-access-f92km\") pod \"dns-default-xcjdc\" (UID: \"aa4a4f1e-edd6-430a-900d-ce172449b50b\") " pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.600045 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-bound-sa-token\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.606563 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" event={"ID":"9329366a-9fdd-45dd-9c5d-6139f6cc64c2","Type":"ContainerStarted","Data":"820d6cda793f1830cde4ff089bb8ad30d05546dd9bd4c72deabb8b5f946d70d6"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.611211 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" event={"ID":"e75ba71d-5bb5-435f-b450-d47523b91d73","Type":"ContainerStarted","Data":"85001e9afdcf334781f97f107930ac1cd45a3f1b42e990600732e85529ba7e3e"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.616871 4717 generic.go:334] "Generic (PLEG): container finished" podID="3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3" containerID="37acda5522ca45b55a9dfa08758f9914d4ad6644ccd12f57a811d65077b6c8c4" exitCode=0 Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.616918 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" event={"ID":"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3","Type":"ContainerDied","Data":"37acda5522ca45b55a9dfa08758f9914d4ad6644ccd12f57a811d65077b6c8c4"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.617054 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" event={"ID":"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3","Type":"ContainerStarted","Data":"eacd426f0848cf2e832656145a5ac8b33e57584e6c11635c96329526842b70fd"} Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.623294 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.624208 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q75n\" (UniqueName: \"kubernetes.io/projected/65bfcfc1-e7e3-4058-b4bd-5d987a274ba4-kube-api-access-6q75n\") pod \"packageserver-d55dfcdfc-vvxk5\" (UID: \"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.634024 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.638671 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.638817 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.138787719 +0000 UTC m=+148.990642185 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.639072 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.639828 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.139816767 +0000 UTC m=+148.991671213 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.644966 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t2l8\" (UniqueName: \"kubernetes.io/projected/36b7da2e-056d-4b81-b1ae-1c919d74c7a8-kube-api-access-6t2l8\") pod \"ingress-operator-5b745b69d9-mptt7\" (UID: \"36b7da2e-056d-4b81-b1ae-1c919d74c7a8\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.674119 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.677241 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.680890 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpk6x\" (UniqueName: \"kubernetes.io/projected/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-kube-api-access-fpk6x\") pod \"marketplace-operator-79b997595-8rncj\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.683197 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.695031 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.699924 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq2tg\" (UniqueName: \"kubernetes.io/projected/a37f04f8-452e-4e02-8604-11e976bb5803-kube-api-access-sq2tg\") pod \"machine-config-operator-74547568cd-l97pk\" (UID: \"a37f04f8-452e-4e02-8604-11e976bb5803\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.706987 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.713792 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsgkd\" (UniqueName: \"kubernetes.io/projected/a28c9a08-8ab6-4fa6-bb25-607170b0b934-kube-api-access-tsgkd\") pod \"ingress-canary-89nf8\" (UID: \"a28c9a08-8ab6-4fa6-bb25-607170b0b934\") " pod="openshift-ingress-canary/ingress-canary-89nf8" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.719180 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.726363 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.735824 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fpbl\" (UniqueName: \"kubernetes.io/projected/33230d31-2ca9-424d-9a04-5e8ab8c04663-kube-api-access-9fpbl\") pod \"machine-config-server-p76lr\" (UID: \"33230d31-2ca9-424d-9a04-5e8ab8c04663\") " pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.744880 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-n9m2d"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.745497 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.747428 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.749057 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.249030951 +0000 UTC m=+149.100885457 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.755856 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-r4gdr"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.758276 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.759632 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb1a4ea4-8988-4648-9359-21eca40a8b29-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h7s9n\" (UID: \"cb1a4ea4-8988-4648-9359-21eca40a8b29\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.760302 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.769629 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6skkq\" (UniqueName: \"kubernetes.io/projected/4d0c57e4-d5a7-4d9b-adfa-61c2840724b3-kube-api-access-6skkq\") pod \"package-server-manager-789f6589d5-5tf5r\" (UID: \"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.771573 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.771638 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.786539 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-89nf8" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.793858 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-p76lr" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.802700 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-wn65n"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.808582 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-sf5xk"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.850257 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.850535 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.35052369 +0000 UTC m=+149.202378136 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.916500 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64"] Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.941575 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.951867 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.952131 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.952178 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.452158931 +0000 UTC m=+149.304013377 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.952992 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:17 crc kubenswrapper[4717]: E1002 14:23:17.953322 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.453306992 +0000 UTC m=+149.305161438 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:17 crc kubenswrapper[4717]: I1002 14:23:17.958075 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" Oct 02 14:23:17 crc kubenswrapper[4717]: W1002 14:23:17.983850 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod548b2b20_6dc1_4d5e_a3be_33d278e22b7c.slice/crio-5c861ca0d8cb9e519b608d3cbaac5d60627cd44117c33d1bc305be5db5c05850 WatchSource:0}: Error finding container 5c861ca0d8cb9e519b608d3cbaac5d60627cd44117c33d1bc305be5db5c05850: Status 404 returned error can't find the container with id 5c861ca0d8cb9e519b608d3cbaac5d60627cd44117c33d1bc305be5db5c05850 Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:17.995253 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf2943f5f_caac_4566_b42e_5ef273488f2f.slice/crio-18d0b8f4b11e48a3ddc326599f92c9902fdd81ec94ae7f51eb9ddb009a140160 WatchSource:0}: Error finding container 18d0b8f4b11e48a3ddc326599f92c9902fdd81ec94ae7f51eb9ddb009a140160: Status 404 returned error can't find the container with id 18d0b8f4b11e48a3ddc326599f92c9902fdd81ec94ae7f51eb9ddb009a140160 Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:17.995789 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafb1dfea_fc57_4b25_a65b_0bc13a7c861a.slice/crio-80748b19d4804f1172a0159819bebc5b57ef960782c1b95318e98950bd87d338 WatchSource:0}: Error finding container 80748b19d4804f1172a0159819bebc5b57ef960782c1b95318e98950bd87d338: Status 404 returned error can't find the container with id 80748b19d4804f1172a0159819bebc5b57ef960782c1b95318e98950bd87d338 Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:18.002344 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614f493_81ac_4fa9_bb29_ee9fd24e8d5f.slice/crio-b7c58ddd0b6028a787d3fe2853d7dea180a324d0f89a01432c8b5f634f9e465c WatchSource:0}: Error finding container b7c58ddd0b6028a787d3fe2853d7dea180a324d0f89a01432c8b5f634f9e465c: Status 404 returned error can't find the container with id b7c58ddd0b6028a787d3fe2853d7dea180a324d0f89a01432c8b5f634f9e465c Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.025489 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx"] Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:18.026369 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f7796d9_6e2b_4053_8f62_096fc143c1be.slice/crio-5e0e4767354f8bacc226307668951ac4e2e058e28154a8f4feef3735c309a6b3 WatchSource:0}: Error finding container 5e0e4767354f8bacc226307668951ac4e2e058e28154a8f4feef3735c309a6b3: Status 404 returned error can't find the container with id 5e0e4767354f8bacc226307668951ac4e2e058e28154a8f4feef3735c309a6b3 Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.033789 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.054140 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.054654 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.554636434 +0000 UTC m=+149.406490880 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.144107 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.155843 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.156385 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.656367819 +0000 UTC m=+149.508222265 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.158262 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:18 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:18 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:18 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.158325 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.195390 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2z55s"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.198365 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xwrtk"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.204338 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.211543 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h6pmp"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.211900 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4mg6p"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.257542 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.257853 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.757837827 +0000 UTC m=+149.609692273 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.280510 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-x9gbl"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.305595 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.330892 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl"] Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:18.333632 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod400b985e_f209_45b1_afa7_6904803111e5.slice/crio-222c3cbfd273d26b751e4e0946562397d48ae8617258bc7fe3164e8605c8bf28 WatchSource:0}: Error finding container 222c3cbfd273d26b751e4e0946562397d48ae8617258bc7fe3164e8605c8bf28: Status 404 returned error can't find the container with id 222c3cbfd273d26b751e4e0946562397d48ae8617258bc7fe3164e8605c8bf28 Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:18.350143 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb280459a_455a_402b_8f80_48b3d1e4588e.slice/crio-5ee82208fd49a5b6f6f7e3c03f78e0658d3e3fbb07f00a8f0a5147a9d93585a6 WatchSource:0}: Error finding container 5ee82208fd49a5b6f6f7e3c03f78e0658d3e3fbb07f00a8f0a5147a9d93585a6: Status 404 returned error can't find the container with id 5ee82208fd49a5b6f6f7e3c03f78e0658d3e3fbb07f00a8f0a5147a9d93585a6 Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.359845 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.360284 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.860270459 +0000 UTC m=+149.712124905 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.395403 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.408754 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xcjdc"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.460219 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.460749 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:18.960721008 +0000 UTC m=+149.812575454 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:18.473521 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d800ae3_6269_4dd6_8c80_1f00153084e4.slice/crio-4b3aa14b2d47b5a18978bc3391acd7985a064fe8960b1f98ae9bb1c9d3a33ba4 WatchSource:0}: Error finding container 4b3aa14b2d47b5a18978bc3391acd7985a064fe8960b1f98ae9bb1c9d3a33ba4: Status 404 returned error can't find the container with id 4b3aa14b2d47b5a18978bc3391acd7985a064fe8960b1f98ae9bb1c9d3a33ba4 Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:18.480135 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda43c1f8_cc18_485d_ba7e_f8761d29584c.slice/crio-335ddb449c28da92aa366567d1beb6778ac6f69103096c8e4def8a23e53224ad WatchSource:0}: Error finding container 335ddb449c28da92aa366567d1beb6778ac6f69103096c8e4def8a23e53224ad: Status 404 returned error can't find the container with id 335ddb449c28da92aa366567d1beb6778ac6f69103096c8e4def8a23e53224ad Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:18.489456 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod472afeb7_afee_4a47_ae7f_2c879d9d1dac.slice/crio-467149f5afae6d961ba9f4df5418c9428f10bdd6cb1c08555c622fee58b67296 WatchSource:0}: Error finding container 
467149f5afae6d961ba9f4df5418c9428f10bdd6cb1c08555c622fee58b67296: Status 404 returned error can't find the container with id 467149f5afae6d961ba9f4df5418c9428f10bdd6cb1c08555c622fee58b67296 Oct 02 14:23:18 crc kubenswrapper[4717]: W1002 14:23:18.493005 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa4a4f1e_edd6_430a_900d_ce172449b50b.slice/crio-9b0ed89e5899d38ed03bfd829a8180d493dc2e965711a23d2684f2298f6a482e WatchSource:0}: Error finding container 9b0ed89e5899d38ed03bfd829a8180d493dc2e965711a23d2684f2298f6a482e: Status 404 returned error can't find the container with id 9b0ed89e5899d38ed03bfd829a8180d493dc2e965711a23d2684f2298f6a482e Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.562869 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.563714 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.063700346 +0000 UTC m=+149.915554782 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.603312 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.622817 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.622865 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.629800 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.639886 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" event={"ID":"ab766803-fd23-476d-a273-ddf3c6dd237b","Type":"ContainerStarted","Data":"29aa73bd90d84b897d9c37e26dc5a04d5e3800bb4eb816ba787ded72a2db8b1d"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.645437 4717 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r4gdr" event={"ID":"f2943f5f-caac-4566-b42e-5ef273488f2f","Type":"ContainerStarted","Data":"18d0b8f4b11e48a3ddc326599f92c9902fdd81ec94ae7f51eb9ddb009a140160"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.649767 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" event={"ID":"279ce48f-4f2a-407a-bec4-2e0752b71c2c","Type":"ContainerStarted","Data":"12822051aba7f76a97666c87be8e670aef27abbc45d551545e898b4838f617b6"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.649815 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" event={"ID":"279ce48f-4f2a-407a-bec4-2e0752b71c2c","Type":"ContainerStarted","Data":"66c3f64a618b3868356583221fd71a34d624e32c747fd4c047480f9adadd645b"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.650982 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" event={"ID":"400b985e-f209-45b1-afa7-6904803111e5","Type":"ContainerStarted","Data":"222c3cbfd273d26b751e4e0946562397d48ae8617258bc7fe3164e8605c8bf28"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.668298 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.668445 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.168419741 +0000 UTC m=+150.020274187 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.668609 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.668917 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.168903593 +0000 UTC m=+150.020758039 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.671625 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2z55s" event={"ID":"1b3f6b3b-4788-41a8-ad23-71d0595da58e","Type":"ContainerStarted","Data":"a599acfacad2cedaf0d570eacbf95e4ae0ddf29854e7d9503e9f1a6f027e5a64"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.673348 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" event={"ID":"c3c15c60-2dce-4e54-9319-99e143d330c0","Type":"ContainerStarted","Data":"861fb5167d7a35dcdc3a2ba44b23b2243a584ab226058c38aea89efb1075ea65"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.674757 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xcjdc" event={"ID":"aa4a4f1e-edd6-430a-900d-ce172449b50b","Type":"ContainerStarted","Data":"9b0ed89e5899d38ed03bfd829a8180d493dc2e965711a23d2684f2298f6a482e"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.677215 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" event={"ID":"9329366a-9fdd-45dd-9c5d-6139f6cc64c2","Type":"ContainerStarted","Data":"24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.677773 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.679982 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.708178 4717 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-qmztn container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" start-of-body= Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.708242 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" podUID="9329366a-9fdd-45dd-9c5d-6139f6cc64c2" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.708636 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" event={"ID":"b280459a-455a-402b-8f80-48b3d1e4588e","Type":"ContainerStarted","Data":"5ee82208fd49a5b6f6f7e3c03f78e0658d3e3fbb07f00a8f0a5147a9d93585a6"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.709969 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-d6qrw" podStartSLOduration=126.709949132 podStartE2EDuration="2m6.709949132s" 
podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:18.70909685 +0000 UTC m=+149.560951296" watchObservedRunningTime="2025-10-02 14:23:18.709949132 +0000 UTC m=+149.561803578" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.740220 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" event={"ID":"afb1dfea-fc57-4b25-a65b-0bc13a7c861a","Type":"ContainerStarted","Data":"80748b19d4804f1172a0159819bebc5b57ef960782c1b95318e98950bd87d338"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.755443 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" event={"ID":"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f","Type":"ContainerStarted","Data":"b7c58ddd0b6028a787d3fe2853d7dea180a324d0f89a01432c8b5f634f9e465c"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.757315 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" event={"ID":"aed4d244-0337-468e-a9d5-d9b0ea805a41","Type":"ContainerStarted","Data":"4abd6e0e388855b520b122119b68e8380b16a9655f4632e30a65593545cf78e5"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.758924 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" event={"ID":"da43c1f8-cc18-485d-ba7e-f8761d29584c","Type":"ContainerStarted","Data":"335ddb449c28da92aa366567d1beb6778ac6f69103096c8e4def8a23e53224ad"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.769489 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.769811 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.269781865 +0000 UTC m=+150.121636321 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.770068 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.770184 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" podStartSLOduration=126.770163594 podStartE2EDuration="2m6.770163594s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:18.740395838 +0000 UTC m=+149.592250284" watchObservedRunningTime="2025-10-02 14:23:18.770163594 +0000 UTC m=+149.622018040" Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.772343 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.272324053 +0000 UTC m=+150.124178499 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.775365 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" event={"ID":"1f7796d9-6e2b-4053-8f62-096fc143c1be","Type":"ContainerStarted","Data":"5e0e4767354f8bacc226307668951ac4e2e058e28154a8f4feef3735c309a6b3"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.785503 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" event={"ID":"8d956a66-02c1-42f4-8b84-0772796d4ff5","Type":"ContainerStarted","Data":"66f92480a794850fe5adfb0814bc27d68883bfe6e8a32f1453ad6b170d8c5ece"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.785547 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" event={"ID":"8d956a66-02c1-42f4-8b84-0772796d4ff5","Type":"ContainerStarted","Data":"920d7e5e6cfba221e3cc662692bec522a42c4e848a7bb6777d22542f644c5529"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.787641 4717 generic.go:334] "Generic (PLEG): container finished" podID="e75ba71d-5bb5-435f-b450-d47523b91d73" containerID="c8c82914e1aef66c4e2fdcf45642bb91d5796f64d223449fc602803ba2d4c37e" exitCode=0 Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.787706 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" event={"ID":"e75ba71d-5bb5-435f-b450-d47523b91d73","Type":"ContainerDied","Data":"c8c82914e1aef66c4e2fdcf45642bb91d5796f64d223449fc602803ba2d4c37e"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.800959 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" event={"ID":"7d5177ac-fd1e-4f0a-822a-cb329a91829a","Type":"ContainerStarted","Data":"0d0e7ca742511101f893b75ee344faf03cc5a907d304ba2e71e6f437c3eda72d"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.810585 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" event={"ID":"b45535f7-1aaf-48bc-bd92-6ee1f56efe78","Type":"ContainerStarted","Data":"a059fb150173ae1be316383b869eabcc7daf01289c9e4ef4948f11b3751c7a78"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.811071 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" event={"ID":"472afeb7-afee-4a47-ae7f-2c879d9d1dac","Type":"ContainerStarted","Data":"467149f5afae6d961ba9f4df5418c9428f10bdd6cb1c08555c622fee58b67296"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.870774 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:18 crc kubenswrapper[4717]: 
E1002 14:23:18.871429 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.371407786 +0000 UTC m=+150.223262302 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.972069 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:18 crc kubenswrapper[4717]: E1002 14:23:18.972613 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.472601436 +0000 UTC m=+150.324455882 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973868 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973887 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" event={"ID":"548b2b20-6dc1-4d5e-a3be-33d278e22b7c","Type":"ContainerStarted","Data":"f3911073d8a9f878ef71c51c4948e4ff6d7f11174aa96feaff116a91a90710e1"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973901 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" event={"ID":"548b2b20-6dc1-4d5e-a3be-33d278e22b7c","Type":"ContainerStarted","Data":"5c861ca0d8cb9e519b608d3cbaac5d60627cd44117c33d1bc305be5db5c05850"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973916 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-sf5xk" event={"ID":"41b77629-2976-425f-b71c-a7f2e9686f11","Type":"ContainerStarted","Data":"d06ab47d52ba7a9c221c8e39ef60cdd257e09c837a395e590f4d717c66e0a98c"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973925 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-p76lr" 
event={"ID":"33230d31-2ca9-424d-9a04-5e8ab8c04663","Type":"ContainerStarted","Data":"76b12f5c8a49785dc2bb31d7d0a143ea3653f6e9b3102daa4cc1dbf60091e8ba"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973952 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" event={"ID":"4d800ae3-6269-4dd6-8c80-1f00153084e4","Type":"ContainerStarted","Data":"4b3aa14b2d47b5a18978bc3391acd7985a064fe8960b1f98ae9bb1c9d3a33ba4"} Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973965 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973979 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kwgnk"] Oct 02 14:23:18 crc kubenswrapper[4717]: I1002 14:23:18.973987 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.072817 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-l2f4x" podStartSLOduration=127.072799389 podStartE2EDuration="2m7.072799389s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:19.071140334 +0000 UTC m=+149.922994780" watchObservedRunningTime="2025-10-02 14:23:19.072799389 +0000 UTC m=+149.924653835" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.074416 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.074572 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.574555396 +0000 UTC m=+150.426409842 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.074859 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.076227 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.57621536 +0000 UTC m=+150.428069806 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: W1002 14:23:19.094903 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36b7da2e_056d_4b81_b1ae_1c919d74c7a8.slice/crio-d033ee4b261198cfb8ee9d96bd93c76e000c6928c38db7ad70dc4d5de9f02d0e WatchSource:0}: Error finding container d033ee4b261198cfb8ee9d96bd93c76e000c6928c38db7ad70dc4d5de9f02d0e: Status 404 returned error can't find the container with id d033ee4b261198cfb8ee9d96bd93c76e000c6928c38db7ad70dc4d5de9f02d0e Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.144823 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:19 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:19 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:19 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.145247 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.166549 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rncj"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.182059 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.182410 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.682394134 +0000 UTC m=+150.534248580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.284430 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.284712 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.784700913 +0000 UTC m=+150.636555359 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.328563 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.338567 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.353206 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.385336 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.385913 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.885892042 +0000 UTC m=+150.737746488 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.404306 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.411277 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.443859 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4cx4" podStartSLOduration=127.443831385 podStartE2EDuration="2m7.443831385s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:19.443815604 +0000 UTC m=+150.295670050" watchObservedRunningTime="2025-10-02 14:23:19.443831385 +0000 UTC m=+150.295685831" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.464402 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.477677 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-89nf8"] Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.491184 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.494175 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:19.994158522 +0000 UTC m=+150.846012958 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.527238 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" podStartSLOduration=127.527221027 podStartE2EDuration="2m7.527221027s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:19.52395088 +0000 UTC m=+150.375805326" watchObservedRunningTime="2025-10-02 14:23:19.527221027 +0000 UTC m=+150.379075473" Oct 02 14:23:19 crc kubenswrapper[4717]: W1002 14:23:19.543110 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5768c1b_0a6c_4683_8793_bcba4d0d07d9.slice/crio-a54352f9fb1738a6fc7f8ff4df4a293bed2d04b4b37c32b662ee33d8bd7024db WatchSource:0}: Error finding container a54352f9fb1738a6fc7f8ff4df4a293bed2d04b4b37c32b662ee33d8bd7024db: Status 404 returned error can't find the container with id a54352f9fb1738a6fc7f8ff4df4a293bed2d04b4b37c32b662ee33d8bd7024db Oct 02 14:23:19 crc kubenswrapper[4717]: W1002 14:23:19.550691 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb1a4ea4_8988_4648_9359_21eca40a8b29.slice/crio-458fe2a1abd6ceddd567314e8006894d28ce08bed1f51524a7b23379a4bc013c WatchSource:0}: Error finding container 458fe2a1abd6ceddd567314e8006894d28ce08bed1f51524a7b23379a4bc013c: Status 404 returned error can't find the container with id 458fe2a1abd6ceddd567314e8006894d28ce08bed1f51524a7b23379a4bc013c Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.558648 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jl7d4" podStartSLOduration=128.558630609 podStartE2EDuration="2m8.558630609s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:19.554371635 +0000 UTC m=+150.406226081" watchObservedRunningTime="2025-10-02 14:23:19.558630609 +0000 UTC m=+150.410485055" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.602683 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.603106 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.103091638 +0000 UTC m=+150.954946084 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.707108 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.707493 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.207479054 +0000 UTC m=+151.059333500 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.809460 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.809689 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.30966291 +0000 UTC m=+151.161517356 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.810127 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.810170 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.810193 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.810221 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.810277 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.810523 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.310510032 +0000 UTC m=+151.162364478 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.818886 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.819067 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.827103 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.830576 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.855507 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.863638 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.870920 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.913407 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.913521 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-02 14:23:20.413503011 +0000 UTC m=+151.265357457 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.913914 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:19 crc kubenswrapper[4717]: E1002 14:23:19.914231 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.41421839 +0000 UTC m=+151.266072836 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.950672 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" event={"ID":"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4","Type":"ContainerStarted","Data":"0a7a86a5220960d2369b54ffed7451328622c00616713470f9a20d056f6110c1"} Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.950741 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" event={"ID":"65bfcfc1-e7e3-4058-b4bd-5d987a274ba4","Type":"ContainerStarted","Data":"8b428db9dbb3064ccd7634a19d7cc70e3e64eede566c08c48063e4221761601e"} Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.951838 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" event={"ID":"14962662-bec2-4616-b950-69bea84d99d0","Type":"ContainerStarted","Data":"3b444c6c869fc9c19db809cdf37385ce994a236a5fccc4beccf5f46a8425747d"} Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.957691 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" event={"ID":"f614f493-81ac-4fa9-bb29-ee9fd24e8d5f","Type":"ContainerStarted","Data":"3a04a185438ba4301d1d3d06acb41c8452385ac2061f58a3cd98e1d1b4544e0d"} Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.961001 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" 
event={"ID":"cb1a4ea4-8988-4648-9359-21eca40a8b29","Type":"ContainerStarted","Data":"458fe2a1abd6ceddd567314e8006894d28ce08bed1f51524a7b23379a4bc013c"} Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.976702 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" event={"ID":"ad9781fa-330d-4741-b182-0fdf0d1c394d","Type":"ContainerStarted","Data":"0716996e3320282fbc8fed5c582e4de895aa32b8370d7f623e3d504cc1fbb69f"} Oct 02 14:23:19 crc kubenswrapper[4717]: I1002 14:23:19.982079 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" event={"ID":"a37f04f8-452e-4e02-8604-11e976bb5803","Type":"ContainerStarted","Data":"4aa12a3672e137ae22ced1be160ee61ebb80e988015ca5c152ad2c2c78f66af3"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.001242 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xcjdc" event={"ID":"aa4a4f1e-edd6-430a-900d-ce172449b50b","Type":"ContainerStarted","Data":"a023ba5bc39c216cdbc9430e22bcd4853be5e27b7822b30845d88aecdc776da7"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.002200 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-sf5xk" event={"ID":"41b77629-2976-425f-b71c-a7f2e9686f11","Type":"ContainerStarted","Data":"5029ce89c4ce750179e4088471ed4e4f1fe6d6b72f50625fa9c9b87085b35f04"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.003003 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-sf5xk" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.005054 4717 patch_prober.go:28] interesting pod/downloads-7954f5f757-sf5xk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.005099 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sf5xk" podUID="41b77629-2976-425f-b71c-a7f2e9686f11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.005525 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" event={"ID":"2774fb34-ff9f-408e-a493-f1db8d7d8dc1","Type":"ContainerStarted","Data":"b6adb91404f75719e0cca4f21c0097f5aa92dc479de56176d85f74a69e3f5c42"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.016201 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.016623 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.516600342 +0000 UTC m=+151.368454788 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.016809 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.017766 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.517754642 +0000 UTC m=+151.369609088 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.019181 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" event={"ID":"afb1dfea-fc57-4b25-a65b-0bc13a7c861a","Type":"ContainerStarted","Data":"7d7e04718a81387a74920ba8bb49c1119bddd88213af9edfe18deca607526e67"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.057214 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-r4gdr" event={"ID":"f2943f5f-caac-4566-b42e-5ef273488f2f","Type":"ContainerStarted","Data":"82c71175bd566e71f879c8f1b47253a48d8e0446751c607e523906549fa98a1e"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.063547 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" event={"ID":"400b985e-f209-45b1-afa7-6904803111e5","Type":"ContainerStarted","Data":"72714e61030a4d0c03d4ab60c9b5c175d76be485d31770c7016a04ae4e662f1f"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.101508 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-n9m2d" podStartSLOduration=128.101490014 podStartE2EDuration="2m8.101490014s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.090995494 +0000 UTC m=+150.942849940" watchObservedRunningTime="2025-10-02 14:23:20.101490014 +0000 UTC m=+150.953344460" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.106615 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9gcmm" 
podStartSLOduration=128.106594691 podStartE2EDuration="2m8.106594691s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.029380174 +0000 UTC m=+150.881234620" watchObservedRunningTime="2025-10-02 14:23:20.106594691 +0000 UTC m=+150.958449137" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.117707 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.119284 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.61925977 +0000 UTC m=+151.471114216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.121640 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" event={"ID":"7d5177ac-fd1e-4f0a-822a-cb329a91829a","Type":"ContainerStarted","Data":"c80f234c9a7a05a968edfb722d15cc0584f0d6e3f8306d49ba40445256113e41"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.123325 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.127332 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.627315396 +0000 UTC m=+151.479169912 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.131311 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" event={"ID":"da43c1f8-cc18-485d-ba7e-f8761d29584c","Type":"ContainerStarted","Data":"b5e20f5fb301f432ecc34e629a1b038a7c7f18c91bb686596ebdcde00162e871"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.139369 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" event={"ID":"aed4d244-0337-468e-a9d5-d9b0ea805a41","Type":"ContainerStarted","Data":"af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.140503 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.140709 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:20 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:20 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:20 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.140746 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.144304 4717 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-5zkkr container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.144358 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" podUID="aed4d244-0337-468e-a9d5-d9b0ea805a41" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.151056 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" event={"ID":"8d956a66-02c1-42f4-8b84-0772796d4ff5","Type":"ContainerStarted","Data":"f21b67a0c5f8dd4941fec862cdd9e3a9e68f432c58ebb1e59284c0c88bd442eb"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.153769 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-sf5xk" 
podStartSLOduration=129.153752194 podStartE2EDuration="2m9.153752194s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.152444349 +0000 UTC m=+151.004298785" watchObservedRunningTime="2025-10-02 14:23:20.153752194 +0000 UTC m=+151.005606640" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.189172 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" podStartSLOduration=128.189154672 podStartE2EDuration="2m8.189154672s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.188387961 +0000 UTC m=+151.040242417" watchObservedRunningTime="2025-10-02 14:23:20.189154672 +0000 UTC m=+151.041009118" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.216894 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" event={"ID":"75ea49ac-bda7-4812-8107-d79902f26b2c","Type":"ContainerStarted","Data":"4e89707f1f986f7856dd3eedcbd79ee55ebe89257bc8b2a8fa1dc49a6c9ce468"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.216962 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" event={"ID":"75ea49ac-bda7-4812-8107-d79902f26b2c","Type":"ContainerStarted","Data":"c559b80b131dc22de56771741d1e7f5dbe99de49243eb7b965acdbd36d30267f"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.217762 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.221416 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-2vzcx" podStartSLOduration=129.221395015 podStartE2EDuration="2m9.221395015s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.221132569 +0000 UTC m=+151.072987005" watchObservedRunningTime="2025-10-02 14:23:20.221395015 +0000 UTC m=+151.073249461" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.227680 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.228791 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.728777283 +0000 UTC m=+151.580631729 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.241816 4717 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-f8mgk container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.241897 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" podUID="75ea49ac-bda7-4812-8107-d79902f26b2c" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.271125 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" event={"ID":"36b7da2e-056d-4b81-b1ae-1c919d74c7a8","Type":"ContainerStarted","Data":"d033ee4b261198cfb8ee9d96bd93c76e000c6928c38db7ad70dc4d5de9f02d0e"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.297152 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vbgxl" podStartSLOduration=128.297135484 podStartE2EDuration="2m8.297135484s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.254376858 +0000 UTC m=+151.106231324" watchObservedRunningTime="2025-10-02 14:23:20.297135484 +0000 UTC m=+151.148989930" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.298274 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-r4gdr" podStartSLOduration=129.298268304 podStartE2EDuration="2m9.298268304s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.295006216 +0000 UTC m=+151.146860652" watchObservedRunningTime="2025-10-02 14:23:20.298268304 +0000 UTC m=+151.150122750" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.322306 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" event={"ID":"3ab0a12a-d53c-4703-8f25-3aabfc5ba7d3","Type":"ContainerStarted","Data":"a89d7d3bb727d64c96a2053a273137044eb0738feac4fd716bb2b7c0e4d4f8b6"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.332819 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 
14:23:20.333243 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.83322968 +0000 UTC m=+151.685084126 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.387512 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-h6pmp" podStartSLOduration=128.387494564 podStartE2EDuration="2m8.387494564s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.331814852 +0000 UTC m=+151.183669308" watchObservedRunningTime="2025-10-02 14:23:20.387494564 +0000 UTC m=+151.239349010" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.389094 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" podStartSLOduration=128.389086266 podStartE2EDuration="2m8.389086266s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.386534588 +0000 UTC m=+151.238389034" watchObservedRunningTime="2025-10-02 14:23:20.389086266 +0000 UTC m=+151.240940712" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.401215 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" event={"ID":"6fbbc834-46e7-4024-9a5e-9602f4f98138","Type":"ContainerStarted","Data":"7b481c3dd610bc02cab2bf25f68c4940c6304b0c7b7e440a759616f5a3aa1132"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.401253 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.401262 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" event={"ID":"6fbbc834-46e7-4024-9a5e-9602f4f98138","Type":"ContainerStarted","Data":"711e777a0fe666663ea827e7e3f9f5efb0ba123cb8af94f0ffc10c3e26f47184"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.410653 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" event={"ID":"c3c15c60-2dce-4e54-9319-99e143d330c0","Type":"ContainerStarted","Data":"0c25b719555308a5ff07fdc0dc861d7ec4eb5b405e0494aff57b86ea2f161461"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.428664 4717 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2cdqg container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 
10.217.0.29:8443: connect: connection refused" start-of-body= Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.428715 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" podUID="6fbbc834-46e7-4024-9a5e-9602f4f98138" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.431716 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" podStartSLOduration=128.431704247 podStartE2EDuration="2m8.431704247s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.431599814 +0000 UTC m=+151.283454260" watchObservedRunningTime="2025-10-02 14:23:20.431704247 +0000 UTC m=+151.283558693" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.438106 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.438886 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:20.938843358 +0000 UTC m=+151.790697804 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.462277 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" podStartSLOduration=128.462251695 podStartE2EDuration="2m8.462251695s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.454507057 +0000 UTC m=+151.306361503" watchObservedRunningTime="2025-10-02 14:23:20.462251695 +0000 UTC m=+151.314106141" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.468704 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" event={"ID":"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3","Type":"ContainerStarted","Data":"36667584cc54805826232121e8f505c74dc47c49431ce4f11a58d49bcdf995a8"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.482062 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4nwjx" podStartSLOduration=128.482041785 podStartE2EDuration="2m8.482041785s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.479302611 +0000 UTC m=+151.331157057" watchObservedRunningTime="2025-10-02 14:23:20.482041785 +0000 UTC m=+151.333896231" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.512856 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" event={"ID":"b5768c1b-0a6c-4683-8793-bcba4d0d07d9","Type":"ContainerStarted","Data":"a54352f9fb1738a6fc7f8ff4df4a293bed2d04b4b37c32b662ee33d8bd7024db"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.531152 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" event={"ID":"472afeb7-afee-4a47-ae7f-2c879d9d1dac","Type":"ContainerStarted","Data":"bdc10a1190c84957f560c947a5cb7abbb4da229cc69f429b66d5814b5626b9aa"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.542004 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" event={"ID":"58abdd37-1588-4dce-b4b8-bfa17b906ca4","Type":"ContainerStarted","Data":"e6bf6d890dfd2eccbe3ffde99c6af43a29af8e9f547cabaa82af68dae47bffeb"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.542437 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:20 crc 
kubenswrapper[4717]: E1002 14:23:20.542820 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.042807151 +0000 UTC m=+151.894661597 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.551876 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-p76lr" event={"ID":"33230d31-2ca9-424d-9a04-5e8ab8c04663","Type":"ContainerStarted","Data":"97c622bad0c67492227c827aceebcc4c601fab7b0bf095aa85e91e467abe05fb"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.561918 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s2bkr" podStartSLOduration=128.561898343 podStartE2EDuration="2m8.561898343s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.561202405 +0000 UTC m=+151.413056851" watchObservedRunningTime="2025-10-02 14:23:20.561898343 +0000 UTC m=+151.413752789" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.617045 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" podStartSLOduration=128.617027429 podStartE2EDuration="2m8.617027429s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.614771309 +0000 UTC m=+151.466625745" watchObservedRunningTime="2025-10-02 14:23:20.617027429 +0000 UTC m=+151.468881875" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.658206 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.659106 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.159091606 +0000 UTC m=+152.010946042 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.721090 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" event={"ID":"1f7796d9-6e2b-4053-8f62-096fc143c1be","Type":"ContainerStarted","Data":"258b34321475491f9b87d4643560a5fd35169a280368ef46a82a79f11f7e1392"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.756841 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" event={"ID":"a1756fd2-89da-4978-9a32-2201fbff47ee","Type":"ContainerStarted","Data":"3259179e84819b026d25c0adde03621cb11fbee4c00f734f91be4ce048f4fe6c"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.762713 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.763096 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.263084641 +0000 UTC m=+152.114939087 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.777160 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9jx64" podStartSLOduration=128.777138937 podStartE2EDuration="2m8.777138937s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.777069685 +0000 UTC m=+151.628924131" watchObservedRunningTime="2025-10-02 14:23:20.777138937 +0000 UTC m=+151.628993373" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.778131 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" event={"ID":"b280459a-455a-402b-8f80-48b3d1e4588e","Type":"ContainerStarted","Data":"50f8d91f1e07e575b3db77b4dc48522b19fcd177d3a85394a8faa5147792c9cb"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.778860 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-p76lr" podStartSLOduration=6.778851442 podStartE2EDuration="6.778851442s" podCreationTimestamp="2025-10-02 14:23:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.65740183 +0000 UTC m=+151.509256276" watchObservedRunningTime="2025-10-02 14:23:20.778851442 +0000 UTC m=+151.630705888" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.795648 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-89nf8" event={"ID":"a28c9a08-8ab6-4fa6-bb25-607170b0b934","Type":"ContainerStarted","Data":"6b037b35dde6515c9353ae43d34f760f581ed5910ede816c5b2af40dcda9709b"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.829655 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-89nf8" podStartSLOduration=6.829639172 podStartE2EDuration="6.829639172s" podCreationTimestamp="2025-10-02 14:23:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:20.828647796 +0000 UTC m=+151.680502242" watchObservedRunningTime="2025-10-02 14:23:20.829639172 +0000 UTC m=+151.681493618" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.829988 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" event={"ID":"ab766803-fd23-476d-a273-ddf3c6dd237b","Type":"ContainerDied","Data":"a0e52c48e5b3d09dec5efac163b8ac139051ca05bdddb7398191ff0b71c34166"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.831326 4717 generic.go:334] "Generic (PLEG): container finished" podID="ab766803-fd23-476d-a273-ddf3c6dd237b" containerID="a0e52c48e5b3d09dec5efac163b8ac139051ca05bdddb7398191ff0b71c34166" exitCode=0 Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.863917 4717 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.865496 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.365478322 +0000 UTC m=+152.217332768 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.931427 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2z55s" event={"ID":"1b3f6b3b-4788-41a8-ad23-71d0595da58e","Type":"ContainerStarted","Data":"2596f97f388a6db84f57515e35b57165b859b6d7c9a881a478190d3918fe1c04"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.931489 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.958870 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" event={"ID":"4d800ae3-6269-4dd6-8c80-1f00153084e4","Type":"ContainerStarted","Data":"9255b8d4842fe3188dee10e9f04bb19f15b08d169f51925b3c5e8c0b2e84102b"} Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.965750 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:20 crc kubenswrapper[4717]: E1002 14:23:20.966064 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.466054166 +0000 UTC m=+152.317908612 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.981340 4717 patch_prober.go:28] interesting pod/console-operator-58897d9998-2z55s container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.981393 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2z55s" podUID="1b3f6b3b-4788-41a8-ad23-71d0595da58e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 02 14:23:20 crc kubenswrapper[4717]: I1002 14:23:20.986066 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.068824 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.069943 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.569913597 +0000 UTC m=+152.421768043 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.163595 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:21 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:21 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:21 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.163858 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.173470 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.201120 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.700813101 +0000 UTC m=+152.552667547 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.275564 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.276449 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.776432916 +0000 UTC m=+152.628287362 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.383500 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.384225 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.884212712 +0000 UTC m=+152.736067158 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.492172 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.492377 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:21.992360569 +0000 UTC m=+152.844215015 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.595790 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.596281 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.096269821 +0000 UTC m=+152.948124257 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.690225 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" podStartSLOduration=129.690204066 podStartE2EDuration="2m9.690204066s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:21.689205679 +0000 UTC m=+152.541060125" watchObservedRunningTime="2025-10-02 14:23:21.690204066 +0000 UTC m=+152.542058512" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.699017 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.699228 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.199213447 +0000 UTC m=+153.051067893 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.734057 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-2z55s" podStartSLOduration=130.73403741 podStartE2EDuration="2m10.73403741s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:21.732614192 +0000 UTC m=+152.584468638" watchObservedRunningTime="2025-10-02 14:23:21.73403741 +0000 UTC m=+152.585891856" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.743297 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.743805 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.765411 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.801617 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.801924 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.301912698 +0000 UTC m=+153.153767144 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.905964 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:21 crc kubenswrapper[4717]: E1002 14:23:21.906154 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.406139478 +0000 UTC m=+153.257993924 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.965041 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" event={"ID":"7d5177ac-fd1e-4f0a-822a-cb329a91829a","Type":"ContainerStarted","Data":"c745b67c9d3879c97305e59948439671386f3a7c618ff681785be86244474bf5"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.967235 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xcjdc" event={"ID":"aa4a4f1e-edd6-430a-900d-ce172449b50b","Type":"ContainerStarted","Data":"b3b9fdfb4d232ba52425237da8af0bf45158baac9cc517509bb4da8c940793a4"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.967572 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.969203 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" event={"ID":"ab766803-fd23-476d-a273-ddf3c6dd237b","Type":"ContainerStarted","Data":"23245249154f3b96454b9d97c20c3d93873d3ab9db6c028366a8225a08e2b7a3"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.969306 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.970520 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" event={"ID":"a1756fd2-89da-4978-9a32-2201fbff47ee","Type":"ContainerStarted","Data":"5df130c1b457a6b57b63e48a256526ca86f7b4b3e380a41122dcdf5bb6b6d051"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.972984 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c87785f014e1fe2ce54239cd1e03a2eb3b0a6004de01f44e72d8042b383a86fa"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.973020 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a40d0eaabfa30d592b0065af45f7e99a1a7cb92bf616784c887103b607e77fa1"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.974631 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" event={"ID":"36b7da2e-056d-4b81-b1ae-1c919d74c7a8","Type":"ContainerStarted","Data":"16bbbe613ce13b56d4a3ff3588e7bc948ea0c14d70f85c954bbe7fb8b4ffa304"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.974653 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" event={"ID":"36b7da2e-056d-4b81-b1ae-1c919d74c7a8","Type":"ContainerStarted","Data":"f6044f058f38724e56b8a51ea5371380c53fd3827e19b11ddd1d1264e987ef34"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.977704 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" event={"ID":"cb1a4ea4-8988-4648-9359-21eca40a8b29","Type":"ContainerStarted","Data":"5de0fd9c693e8b50580d3df8de3aa3375b3a275db28c7e7b91f0cf10965780c6"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.980302 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" event={"ID":"ad9781fa-330d-4741-b182-0fdf0d1c394d","Type":"ContainerStarted","Data":"28b55c7692ab7018d786ac79ced31971bc1c8134371c112463c74b63bb65cbda"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.982532 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kwgnk" event={"ID":"58abdd37-1588-4dce-b4b8-bfa17b906ca4","Type":"ContainerStarted","Data":"0e9b24b1880a52a9b953bc79e0b7914069fb8d0474b6642383199729f0b8b576"} Oct 02 14:23:21 crc kubenswrapper[4717]: I1002 14:23:21.998713 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-x9gbl" podStartSLOduration=129.998681006 podStartE2EDuration="2m9.998681006s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:21.985451283 +0000 UTC m=+152.837305729" watchObservedRunningTime="2025-10-02 14:23:21.998681006 +0000 UTC m=+152.850535452" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.007705 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.007824 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" 
event={"ID":"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3","Type":"ContainerStarted","Data":"c0c797e34e4b6a5497da092a0c27cb29fe1f57712a75df0f5898c214b79ca4f4"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.007863 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" event={"ID":"4d0c57e4-d5a7-4d9b-adfa-61c2840724b3","Type":"ContainerStarted","Data":"ab9638b48e73f1c77a0fe9d57c7b72aee9f06ef502e20a741e05975e52beaa05"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.008344 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.008921 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.508906841 +0000 UTC m=+153.360761277 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.040197 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zr8w6" podStartSLOduration=130.040181608 podStartE2EDuration="2m10.040181608s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.039411987 +0000 UTC m=+152.891266433" watchObservedRunningTime="2025-10-02 14:23:22.040181608 +0000 UTC m=+152.892036054" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.048066 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" event={"ID":"14962662-bec2-4616-b950-69bea84d99d0","Type":"ContainerStarted","Data":"40cb08d095a5f373d4855c94298f1f9b7dadf42feaff673f764f38c5a85a378f"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.048287 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" event={"ID":"14962662-bec2-4616-b950-69bea84d99d0","Type":"ContainerStarted","Data":"f08d779024cae721c1670ca5a400ae40e130c0c4df320d6beba62596fcaf60a0"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.083562 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" podStartSLOduration=131.083544459 podStartE2EDuration="2m11.083544459s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.081748971 +0000 UTC m=+152.933603417" watchObservedRunningTime="2025-10-02 14:23:22.083544459 +0000 UTC m=+152.935398905" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.086274 4717 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" event={"ID":"b5768c1b-0a6c-4683-8793-bcba4d0d07d9","Type":"ContainerStarted","Data":"a3be1c6242b2e46f7731008f2ad826389c60d3c62cd1de479070e52ced15f00e"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.100070 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-pwgz6" event={"ID":"4d800ae3-6269-4dd6-8c80-1f00153084e4","Type":"ContainerStarted","Data":"6a9dae4cc22e820a10c3d0c3adc751c61ba850cab381eb4b082a86c6af447734"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.116468 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.116853 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.616837321 +0000 UTC m=+153.468691767 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.119293 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" event={"ID":"e75ba71d-5bb5-435f-b450-d47523b91d73","Type":"ContainerStarted","Data":"6d7131a14bdff381c07b3f04e93c0643ca91d76374c18c9ae6b7f7cac19eaf40"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.119340 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" event={"ID":"e75ba71d-5bb5-435f-b450-d47523b91d73","Type":"ContainerStarted","Data":"a2e6bc4bdbaa52ddafc0270014b4f1262719e344a500cc2ca80a9bac8f0589eb"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.133173 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" event={"ID":"a37f04f8-452e-4e02-8604-11e976bb5803","Type":"ContainerStarted","Data":"2bf845276e5704ca474473b9982e56e836cde0e7ca2f32a779945462ee5ff0d1"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.133219 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" event={"ID":"a37f04f8-452e-4e02-8604-11e976bb5803","Type":"ContainerStarted","Data":"8246f96c8f010c4a1920a8ca1422a0d334fad9bdf0a40fa79c372b3100318143"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.153236 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:22 crc kubenswrapper[4717]: [-]has-synced 
failed: reason withheld Oct 02 14:23:22 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:22 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.153293 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.163146 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"f9abf9b7c89ec1e6cd25119e83181efc8fa1bf468be44cc79cec4a1064c8e7c1"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.163195 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b0e77a8181a488b77fdfb7df20e3bfc9824fe55c2fbd569474ba9de1dd187351"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.163701 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.167661 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h7s9n" podStartSLOduration=130.167632151 podStartE2EDuration="2m10.167632151s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.1657332 +0000 UTC m=+153.017587636" watchObservedRunningTime="2025-10-02 14:23:22.167632151 +0000 UTC m=+153.019486597" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.168920 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-xcjdc" podStartSLOduration=8.168913785 podStartE2EDuration="8.168913785s" podCreationTimestamp="2025-10-02 14:23:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.122628946 +0000 UTC m=+152.974483392" watchObservedRunningTime="2025-10-02 14:23:22.168913785 +0000 UTC m=+153.020768231" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.184178 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-89nf8" event={"ID":"a28c9a08-8ab6-4fa6-bb25-607170b0b934","Type":"ContainerStarted","Data":"88772bb317c29f5ba60d1574cc14343e2a441243cc8e7be65f9bc0f91278ba96"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.185757 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" event={"ID":"b45535f7-1aaf-48bc-bd92-6ee1f56efe78","Type":"ContainerStarted","Data":"07bfc62b165282f052dedcb78db558aa4513b4b0f8d59502ba10c216b5e09a48"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.186584 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" event={"ID":"2774fb34-ff9f-408e-a493-f1db8d7d8dc1","Type":"ContainerStarted","Data":"becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.187222 4717 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.188269 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" event={"ID":"b280459a-455a-402b-8f80-48b3d1e4588e","Type":"ContainerStarted","Data":"ae121b365f8395b5e1420b0ccb9ec7c54aca0a5396a8067553b36268dc0c75c9"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.196240 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b61b56846d5f51ee107a43b0810413eb10060ebc6a1aaa965529f5dba567535d"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.196283 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"637d67738d72035afee9837ce9ce819215a784e03c39e3f9154bb9762bf41004"} Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.196297 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.199104 4717 patch_prober.go:28] interesting pod/downloads-7954f5f757-sf5xk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.199156 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sf5xk" podUID="41b77629-2976-425f-b71c-a7f2e9686f11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.207553 4717 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8rncj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.207621 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" podUID="2774fb34-ff9f-408e-a493-f1db8d7d8dc1" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.213445 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-f8mgk" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.215298 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.218721 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.219202 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.719185691 +0000 UTC m=+153.571040137 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.245100 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d2wpl" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.250022 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2cdqg" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.265225 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mptt7" podStartSLOduration=130.265205604 podStartE2EDuration="2m10.265205604s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.256711447 +0000 UTC m=+153.108565893" watchObservedRunningTime="2025-10-02 14:23:22.265205604 +0000 UTC m=+153.117060050" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.319462 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.321427 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.821408549 +0000 UTC m=+153.673262995 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.339864 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" podStartSLOduration=130.339848932 podStartE2EDuration="2m10.339848932s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.305213225 +0000 UTC m=+153.157067671" watchObservedRunningTime="2025-10-02 14:23:22.339848932 +0000 UTC m=+153.191703378" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.406110 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-r5w5f" podStartSLOduration=130.406092526 podStartE2EDuration="2m10.406092526s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.366976258 +0000 UTC m=+153.218830704" watchObservedRunningTime="2025-10-02 14:23:22.406092526 +0000 UTC m=+153.257946972" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.425865 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.426259 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:22.926247456 +0000 UTC m=+153.778101902 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.456599 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" podStartSLOduration=130.456584018 podStartE2EDuration="2m10.456584018s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.454314738 +0000 UTC m=+153.306169184" watchObservedRunningTime="2025-10-02 14:23:22.456584018 +0000 UTC m=+153.308438464" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.527637 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.528275 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.028259527 +0000 UTC m=+153.880113973 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.541072 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xwvdw" podStartSLOduration=130.54105716 podStartE2EDuration="2m10.54105716s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.538702687 +0000 UTC m=+153.390557133" watchObservedRunningTime="2025-10-02 14:23:22.54105716 +0000 UTC m=+153.392911606" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.552064 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-2z55s" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.568335 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-xwrtk" podStartSLOduration=130.56831626 podStartE2EDuration="2m10.56831626s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.563217873 +0000 UTC m=+153.415072319" watchObservedRunningTime="2025-10-02 14:23:22.56831626 +0000 UTC m=+153.420170706" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.629376 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.629697 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.129685063 +0000 UTC m=+153.981539509 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.726827 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l97pk" podStartSLOduration=130.726811404 podStartE2EDuration="2m10.726811404s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.726118046 +0000 UTC m=+153.577972492" watchObservedRunningTime="2025-10-02 14:23:22.726811404 +0000 UTC m=+153.578665850" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.730347 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.730710 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.230671197 +0000 UTC m=+154.082525643 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.811622 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" podStartSLOduration=130.811605995 podStartE2EDuration="2m10.811605995s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.811149743 +0000 UTC m=+153.663004189" watchObservedRunningTime="2025-10-02 14:23:22.811605995 +0000 UTC m=+153.663460441" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.813935 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" podStartSLOduration=130.813924837 podStartE2EDuration="2m10.813924837s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.76511497 +0000 UTC m=+153.616969416" watchObservedRunningTime="2025-10-02 14:23:22.813924837 +0000 UTC m=+153.665779283" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.832317 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.832627 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.332615877 +0000 UTC m=+154.184470323 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.924378 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" podStartSLOduration=130.924362454 podStartE2EDuration="2m10.924362454s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:22.922254808 +0000 UTC m=+153.774109254" watchObservedRunningTime="2025-10-02 14:23:22.924362454 +0000 UTC m=+153.776216900" Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.937505 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.937813 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.437786914 +0000 UTC m=+154.289641360 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:22 crc kubenswrapper[4717]: I1002 14:23:22.938028 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:22 crc kubenswrapper[4717]: E1002 14:23:22.938466 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.438435101 +0000 UTC m=+154.290289547 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.039069 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.039313 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.53924244 +0000 UTC m=+154.391096886 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.039818 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.040110 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.540101923 +0000 UTC m=+154.391956369 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.140229 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.140234 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:23 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:23 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:23 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.140406 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.640386539 +0000 UTC m=+154.492240985 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.140442 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.140795 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.64078892 +0000 UTC m=+154.492643366 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.140827 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.195816 4717 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-vvxk5 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.195864 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" podUID="65bfcfc1-e7e3-4058-b4bd-5d987a274ba4" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.202701 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" event={"ID":"b45535f7-1aaf-48bc-bd92-6ee1f56efe78","Type":"ContainerStarted","Data":"196dafcfa3edf4017b78a23239425c26e56ec8e3470ebf795c2e58f133630efa"} Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.208768 4717 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8rncj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.208787 4717 patch_prober.go:28] interesting pod/downloads-7954f5f757-sf5xk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.208825 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" podUID="2774fb34-ff9f-408e-a493-f1db8d7d8dc1" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.208853 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sf5xk" podUID="41b77629-2976-425f-b71c-a7f2e9686f11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.241744 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.241858 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.741828435 +0000 UTC m=+154.593682881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.243074 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.243496 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.743486029 +0000 UTC m=+154.595340475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.284282 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vvxk5" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.318519 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wrvgq"] Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.320533 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.328669 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.361285 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wrvgq"] Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.362005 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.362319 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-utilities\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.362432 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lq5n\" (UniqueName: \"kubernetes.io/projected/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-kube-api-access-6lq5n\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.362569 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-catalog-content\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.362837 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.862818715 +0000 UTC m=+154.714673161 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.463803 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-catalog-content\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.463848 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.463899 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-utilities\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.463921 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lq5n\" (UniqueName: \"kubernetes.io/projected/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-kube-api-access-6lq5n\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.464572 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-catalog-content\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.464659 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-utilities\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.464760 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:23.964741814 +0000 UTC m=+154.816596260 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.508801 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lq5n\" (UniqueName: \"kubernetes.io/projected/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-kube-api-access-6lq5n\") pod \"certified-operators-wrvgq\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.541006 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8bq8n"] Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.542135 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.547867 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.564973 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.565162 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzcqd\" (UniqueName: \"kubernetes.io/projected/11a20c01-b527-4b1f-8b37-cb369059c70d-kube-api-access-vzcqd\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.565244 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-utilities\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.565273 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-catalog-content\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.565388 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:24.065372209 +0000 UTC m=+154.917226655 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.566346 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8bq8n"] Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.662197 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.666922 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzcqd\" (UniqueName: \"kubernetes.io/projected/11a20c01-b527-4b1f-8b37-cb369059c70d-kube-api-access-vzcqd\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.667013 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-utilities\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.667034 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-catalog-content\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.667054 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.667351 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:24.167337399 +0000 UTC m=+155.019191845 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.667633 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-catalog-content\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.667772 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-utilities\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.692249 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzcqd\" (UniqueName: \"kubernetes.io/projected/11a20c01-b527-4b1f-8b37-cb369059c70d-kube-api-access-vzcqd\") pod \"community-operators-8bq8n\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.713248 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kgwg8"] Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.714412 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.741104 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kgwg8"] Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.801346 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.801583 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n6wq\" (UniqueName: \"kubernetes.io/projected/54c7bec6-8a7a-4c8f-bb92-3280de831120-kube-api-access-6n6wq\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.801617 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-catalog-content\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.801658 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-utilities\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.801706 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:24.301678947 +0000 UTC m=+155.153533393 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.841943 4717 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.862368 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.902071 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-utilities\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.902128 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.902154 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n6wq\" (UniqueName: \"kubernetes.io/projected/54c7bec6-8a7a-4c8f-bb92-3280de831120-kube-api-access-6n6wq\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.902181 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-catalog-content\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.902589 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-catalog-content\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: E1002 14:23:23.902616 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:24.402599109 +0000 UTC m=+155.254453555 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.903114 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-utilities\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.906415 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pfqfs"] Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.907295 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.931080 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pfqfs"] Oct 02 14:23:23 crc kubenswrapper[4717]: I1002 14:23:23.932297 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n6wq\" (UniqueName: \"kubernetes.io/projected/54c7bec6-8a7a-4c8f-bb92-3280de831120-kube-api-access-6n6wq\") pod \"certified-operators-kgwg8\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.004472 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:24 crc kubenswrapper[4717]: E1002 14:23:24.004735 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:24.504720783 +0000 UTC m=+155.356575229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.038344 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.107154 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr2sk\" (UniqueName: \"kubernetes.io/projected/9c736886-bb37-4cee-ad32-53a707124bb4-kube-api-access-tr2sk\") pod \"community-operators-pfqfs\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.108083 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.108174 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-catalog-content\") pod \"community-operators-pfqfs\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.108206 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-utilities\") pod \"community-operators-pfqfs\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: E1002 14:23:24.109815 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-02 14:23:24.609802757 +0000 UTC m=+155.461657203 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkhsn" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.146862 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:24 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:24 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:24 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.146918 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.209431 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.209558 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-utilities\") pod \"community-operators-pfqfs\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.209625 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr2sk\" (UniqueName: \"kubernetes.io/projected/9c736886-bb37-4cee-ad32-53a707124bb4-kube-api-access-tr2sk\") pod \"community-operators-pfqfs\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.209667 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-catalog-content\") pod \"community-operators-pfqfs\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.210091 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-catalog-content\") pod \"community-operators-pfqfs\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.210509 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-utilities\") pod \"community-operators-pfqfs\" (UID: 
\"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: E1002 14:23:24.210567 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-02 14:23:24.710554515 +0000 UTC m=+155.562408961 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.240380 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" event={"ID":"b45535f7-1aaf-48bc-bd92-6ee1f56efe78","Type":"ContainerStarted","Data":"fe9ea3161781d668b1b9de473940961adb16e80355ff60cc32869e4ef8a64a80"} Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.240416 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" event={"ID":"b45535f7-1aaf-48bc-bd92-6ee1f56efe78","Type":"ContainerStarted","Data":"86f29c25aa1b838531bc4a85ed51d7b54a97ca2cc238dc96adac43debbb1ed3a"} Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.249573 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.286784 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr2sk\" (UniqueName: \"kubernetes.io/projected/9c736886-bb37-4cee-ad32-53a707124bb4-kube-api-access-tr2sk\") pod \"community-operators-pfqfs\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.296011 4717 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-02T14:23:23.843085816Z","Handler":null,"Name":""} Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.306687 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-4mg6p" podStartSLOduration=10.306669999 podStartE2EDuration="10.306669999s" podCreationTimestamp="2025-10-02 14:23:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:24.300431152 +0000 UTC m=+155.152285598" watchObservedRunningTime="2025-10-02 14:23:24.306669999 +0000 UTC m=+155.158524445" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.323180 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:24 crc 
kubenswrapper[4717]: I1002 14:23:24.325110 4717 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.325146 4717 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.389952 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8bq8n"] Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.517825 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wrvgq"] Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.529783 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.567655 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kgwg8"] Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.574023 4717 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.574093 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:24 crc kubenswrapper[4717]: W1002 14:23:24.577190 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54c7bec6_8a7a_4c8f_bb92_3280de831120.slice/crio-8a3a92ca0a6d6249ea5b350c24e23f0c8d1e3d29fa47a623627745ff9e20b478 WatchSource:0}: Error finding container 8a3a92ca0a6d6249ea5b350c24e23f0c8d1e3d29fa47a623627745ff9e20b478: Status 404 returned error can't find the container with id 8a3a92ca0a6d6249ea5b350c24e23f0c8d1e3d29fa47a623627745ff9e20b478 Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.736507 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkhsn\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.764686 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.768727 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pfqfs"] Oct 02 14:23:24 crc kubenswrapper[4717]: W1002 14:23:24.782140 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c736886_bb37_4cee_ad32_53a707124bb4.slice/crio-de6fc435ed79938470eca8f02a2f41f2db5f5afcd4d2b746cdfdb29931235767 WatchSource:0}: Error finding container de6fc435ed79938470eca8f02a2f41f2db5f5afcd4d2b746cdfdb29931235767: Status 404 returned error can't find the container with id de6fc435ed79938470eca8f02a2f41f2db5f5afcd4d2b746cdfdb29931235767 Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.830394 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.836264 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 02 14:23:24 crc kubenswrapper[4717]: I1002 14:23:24.855699 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.005924 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkhsn"] Oct 02 14:23:25 crc kubenswrapper[4717]: W1002 14:23:25.023234 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61253815_47e3_4e2c_a2e7_565f128dedef.slice/crio-4f64c35a169e5643f0a5a82c2dc9a8d09f83ce4932f7abf016ef471976508d6b WatchSource:0}: Error finding container 4f64c35a169e5643f0a5a82c2dc9a8d09f83ce4932f7abf016ef471976508d6b: Status 404 returned error can't find the container with id 4f64c35a169e5643f0a5a82c2dc9a8d09f83ce4932f7abf016ef471976508d6b Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.141191 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:25 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:25 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:25 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.141829 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 
14:23:25.245314 4717 generic.go:334] "Generic (PLEG): container finished" podID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerID="f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0" exitCode=0 Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.245423 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgwg8" event={"ID":"54c7bec6-8a7a-4c8f-bb92-3280de831120","Type":"ContainerDied","Data":"f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.245462 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgwg8" event={"ID":"54c7bec6-8a7a-4c8f-bb92-3280de831120","Type":"ContainerStarted","Data":"8a3a92ca0a6d6249ea5b350c24e23f0c8d1e3d29fa47a623627745ff9e20b478"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.247088 4717 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.249135 4717 generic.go:334] "Generic (PLEG): container finished" podID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerID="e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692" exitCode=0 Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.249199 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrvgq" event={"ID":"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def","Type":"ContainerDied","Data":"e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.249223 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrvgq" event={"ID":"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def","Type":"ContainerStarted","Data":"74f1cbc49660241b03c15b0ccc9ea905a1e1b2f8379b191df38d54854932766e"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.251734 4717 generic.go:334] "Generic (PLEG): container finished" podID="9c736886-bb37-4cee-ad32-53a707124bb4" containerID="926eacc00992a3f9615fb612645a84b389b065be3ba3018d65d82a38ccc2a343" exitCode=0 Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.251792 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfqfs" event={"ID":"9c736886-bb37-4cee-ad32-53a707124bb4","Type":"ContainerDied","Data":"926eacc00992a3f9615fb612645a84b389b065be3ba3018d65d82a38ccc2a343"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.251830 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfqfs" event={"ID":"9c736886-bb37-4cee-ad32-53a707124bb4","Type":"ContainerStarted","Data":"de6fc435ed79938470eca8f02a2f41f2db5f5afcd4d2b746cdfdb29931235767"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.255268 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" event={"ID":"61253815-47e3-4e2c-a2e7-565f128dedef","Type":"ContainerStarted","Data":"4f64c35a169e5643f0a5a82c2dc9a8d09f83ce4932f7abf016ef471976508d6b"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.255618 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.262819 4717 generic.go:334] "Generic (PLEG): container finished" podID="11a20c01-b527-4b1f-8b37-cb369059c70d" 
containerID="1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee" exitCode=0 Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.262944 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bq8n" event={"ID":"11a20c01-b527-4b1f-8b37-cb369059c70d","Type":"ContainerDied","Data":"1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.263012 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bq8n" event={"ID":"11a20c01-b527-4b1f-8b37-cb369059c70d","Type":"ContainerStarted","Data":"1f7ea01f1413bfd48312ba16bcb64d187ae0ddd19170de9f42b2357588d844a8"} Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.303498 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.304160 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.314325 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.317588 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.323707 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" podStartSLOduration=133.323689432 podStartE2EDuration="2m13.323689432s" podCreationTimestamp="2025-10-02 14:21:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:25.319241083 +0000 UTC m=+156.171095529" watchObservedRunningTime="2025-10-02 14:23:25.323689432 +0000 UTC m=+156.175543868" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.334961 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x7gpq"] Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.336136 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.338280 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.340184 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.372790 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x7gpq"] Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.438700 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.439990 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.541569 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2pdb\" (UniqueName: \"kubernetes.io/projected/cb25e02c-c6e5-4993-8181-ee135ab6f745-kube-api-access-x2pdb\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.541637 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-catalog-content\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.541673 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.541801 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.541911 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-utilities\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.541912 
4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.558506 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.620273 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.642946 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-catalog-content\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.643035 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-utilities\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.643079 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2pdb\" (UniqueName: \"kubernetes.io/projected/cb25e02c-c6e5-4993-8181-ee135ab6f745-kube-api-access-x2pdb\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.643672 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-catalog-content\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.643709 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-utilities\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.661356 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2pdb\" (UniqueName: \"kubernetes.io/projected/cb25e02c-c6e5-4993-8181-ee135ab6f745-kube-api-access-x2pdb\") pod \"redhat-marketplace-x7gpq\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.712736 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8p84d"] Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.714797 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.728685 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p84d"] Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.826340 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 02 14:23:25 crc kubenswrapper[4717]: W1002 14:23:25.833005 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podeb58ea3b_3294_40dc_bc7d_d62a4f2bdb77.slice/crio-b0889bf4590c9989901ede1dc996cc8da2b256ec6140940ffc709808b2bfdcf1 WatchSource:0}: Error finding container b0889bf4590c9989901ede1dc996cc8da2b256ec6140940ffc709808b2bfdcf1: Status 404 returned error can't find the container with id b0889bf4590c9989901ede1dc996cc8da2b256ec6140940ffc709808b2bfdcf1 Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.845777 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck7x4\" (UniqueName: \"kubernetes.io/projected/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-kube-api-access-ck7x4\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.845820 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-catalog-content\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.845857 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-utilities\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.947297 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-catalog-content\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.947383 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-utilities\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.947478 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck7x4\" (UniqueName: \"kubernetes.io/projected/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-kube-api-access-ck7x4\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.947908 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-catalog-content\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.947986 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-utilities\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.951717 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:23:25 crc kubenswrapper[4717]: I1002 14:23:25.971259 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck7x4\" (UniqueName: \"kubernetes.io/projected/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-kube-api-access-ck7x4\") pod \"redhat-marketplace-8p84d\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.042963 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.141380 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:26 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:26 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:26 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.141432 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.237445 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-wn65n" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.273522 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" event={"ID":"61253815-47e3-4e2c-a2e7-565f128dedef","Type":"ContainerStarted","Data":"1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638"} Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.277881 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77","Type":"ContainerStarted","Data":"7eaa106b765764c159694588e477c50a6aee782d03681b0f6cc069f62ba45bc9"} Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.277995 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77","Type":"ContainerStarted","Data":"b0889bf4590c9989901ede1dc996cc8da2b256ec6140940ffc709808b2bfdcf1"} Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.287396 4717 generic.go:334] "Generic (PLEG): container finished" 
podID="ad9781fa-330d-4741-b182-0fdf0d1c394d" containerID="28b55c7692ab7018d786ac79ced31971bc1c8134371c112463c74b63bb65cbda" exitCode=0 Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.287488 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" event={"ID":"ad9781fa-330d-4741-b182-0fdf0d1c394d","Type":"ContainerDied","Data":"28b55c7692ab7018d786ac79ced31971bc1c8134371c112463c74b63bb65cbda"} Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.291864 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.291848347 podStartE2EDuration="1.291848347s" podCreationTimestamp="2025-10-02 14:23:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:26.288910488 +0000 UTC m=+157.140764934" watchObservedRunningTime="2025-10-02 14:23:26.291848347 +0000 UTC m=+157.143702793" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.377256 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x7gpq"] Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.558479 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p84d"] Oct 02 14:23:26 crc kubenswrapper[4717]: W1002 14:23:26.575547 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9b9d93e_5750_4704_a4da_cf78b81b8bd1.slice/crio-2a1282560a1936275ddd1223aba3398636d11838a1bd144910564b3114779eaa WatchSource:0}: Error finding container 2a1282560a1936275ddd1223aba3398636d11838a1bd144910564b3114779eaa: Status 404 returned error can't find the container with id 2a1282560a1936275ddd1223aba3398636d11838a1bd144910564b3114779eaa Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.710919 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pcqwp"] Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.712460 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.718059 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.723140 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pcqwp"] Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.865998 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-catalog-content\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.866419 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-utilities\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.866510 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26cfb\" (UniqueName: \"kubernetes.io/projected/5f296026-5d53-41bc-95bf-f949f536b981-kube-api-access-26cfb\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.967913 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-catalog-content\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.968034 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-utilities\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.968085 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26cfb\" (UniqueName: \"kubernetes.io/projected/5f296026-5d53-41bc-95bf-f949f536b981-kube-api-access-26cfb\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.968826 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-utilities\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:26 crc kubenswrapper[4717]: I1002 14:23:26.968962 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-catalog-content\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " 
pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:26.999406 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26cfb\" (UniqueName: \"kubernetes.io/projected/5f296026-5d53-41bc-95bf-f949f536b981-kube-api-access-26cfb\") pod \"redhat-operators-pcqwp\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.051564 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.051707 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.061101 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.114255 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2ctpb"] Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.116430 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.117538 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2ctpb"] Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.136924 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.140616 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:27 crc kubenswrapper[4717]: [-]has-synced failed: reason withheld Oct 02 14:23:27 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:27 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.140674 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.150536 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.157519 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.157555 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.161515 4717 patch_prober.go:28] interesting pod/console-f9d7485db-r4gdr container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.161559 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-r4gdr" podUID="f2943f5f-caac-4566-b42e-5ef273488f2f" containerName="console" probeResult="failure" output="Get \"https://10.217.0.15:8443/health\": dial tcp 10.217.0.15:8443: connect: connection refused" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.242541 4717 patch_prober.go:28] interesting pod/downloads-7954f5f757-sf5xk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.242622 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sf5xk" podUID="41b77629-2976-425f-b71c-a7f2e9686f11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.242691 4717 patch_prober.go:28] interesting pod/downloads-7954f5f757-sf5xk container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.242750 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-sf5xk" podUID="41b77629-2976-425f-b71c-a7f2e9686f11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.275409 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8nhz\" (UniqueName: \"kubernetes.io/projected/43487213-d8bb-45c6-849b-32be6867aa94-kube-api-access-f8nhz\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.275510 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-utilities\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.280997 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-catalog-content\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.307121 4717 generic.go:334] "Generic (PLEG): container finished" podID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerID="905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0" exitCode=0 Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.307198 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x7gpq" event={"ID":"cb25e02c-c6e5-4993-8181-ee135ab6f745","Type":"ContainerDied","Data":"905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0"} Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.307228 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x7gpq" event={"ID":"cb25e02c-c6e5-4993-8181-ee135ab6f745","Type":"ContainerStarted","Data":"4facf1f8c8d24bd533dccd36a933f7eff9417137214a9bb25117e3cb96186818"} Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.312423 4717 generic.go:334] "Generic (PLEG): container finished" podID="eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77" containerID="7eaa106b765764c159694588e477c50a6aee782d03681b0f6cc069f62ba45bc9" exitCode=0 Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.312495 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77","Type":"ContainerDied","Data":"7eaa106b765764c159694588e477c50a6aee782d03681b0f6cc069f62ba45bc9"} Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.315515 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p84d" event={"ID":"b9b9d93e-5750-4704-a4da-cf78b81b8bd1","Type":"ContainerStarted","Data":"2a1282560a1936275ddd1223aba3398636d11838a1bd144910564b3114779eaa"} Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.320851 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-cqd52" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.383863 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-catalog-content\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.384281 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8nhz\" (UniqueName: \"kubernetes.io/projected/43487213-d8bb-45c6-849b-32be6867aa94-kube-api-access-f8nhz\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.384301 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-utilities\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.384781 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-catalog-content\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.385288 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-utilities\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.427916 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.428609 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8nhz\" (UniqueName: \"kubernetes.io/projected/43487213-d8bb-45c6-849b-32be6867aa94-kube-api-access-f8nhz\") pod \"redhat-operators-2ctpb\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.429895 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.432031 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.434794 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.434982 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.439096 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.590584 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd92a35-4f37-442d-9228-2401d57d015f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2cd92a35-4f37-442d-9228-2401d57d015f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.591070 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd92a35-4f37-442d-9228-2401d57d015f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2cd92a35-4f37-442d-9228-2401d57d015f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.692071 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd92a35-4f37-442d-9228-2401d57d015f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2cd92a35-4f37-442d-9228-2401d57d015f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.692154 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd92a35-4f37-442d-9228-2401d57d015f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2cd92a35-4f37-442d-9228-2401d57d015f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.692781 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd92a35-4f37-442d-9228-2401d57d015f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2cd92a35-4f37-442d-9228-2401d57d015f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.716889 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd92a35-4f37-442d-9228-2401d57d015f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2cd92a35-4f37-442d-9228-2401d57d015f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.750828 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.849076 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2ctpb"] Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.892387 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pcqwp"] Oct 02 14:23:27 crc kubenswrapper[4717]: I1002 14:23:27.979603 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.099030 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad9781fa-330d-4741-b182-0fdf0d1c394d-config-volume\") pod \"ad9781fa-330d-4741-b182-0fdf0d1c394d\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.099103 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgqxl\" (UniqueName: \"kubernetes.io/projected/ad9781fa-330d-4741-b182-0fdf0d1c394d-kube-api-access-rgqxl\") pod \"ad9781fa-330d-4741-b182-0fdf0d1c394d\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.099132 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad9781fa-330d-4741-b182-0fdf0d1c394d-secret-volume\") pod \"ad9781fa-330d-4741-b182-0fdf0d1c394d\" (UID: \"ad9781fa-330d-4741-b182-0fdf0d1c394d\") " Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.101200 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad9781fa-330d-4741-b182-0fdf0d1c394d-config-volume" (OuterVolumeSpecName: "config-volume") pod "ad9781fa-330d-4741-b182-0fdf0d1c394d" (UID: "ad9781fa-330d-4741-b182-0fdf0d1c394d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.105684 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad9781fa-330d-4741-b182-0fdf0d1c394d-kube-api-access-rgqxl" (OuterVolumeSpecName: "kube-api-access-rgqxl") pod "ad9781fa-330d-4741-b182-0fdf0d1c394d" (UID: "ad9781fa-330d-4741-b182-0fdf0d1c394d"). InnerVolumeSpecName "kube-api-access-rgqxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.130464 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad9781fa-330d-4741-b182-0fdf0d1c394d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ad9781fa-330d-4741-b182-0fdf0d1c394d" (UID: "ad9781fa-330d-4741-b182-0fdf0d1c394d"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.142006 4717 patch_prober.go:28] interesting pod/router-default-5444994796-l2f4x container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 02 14:23:28 crc kubenswrapper[4717]: [+]has-synced ok Oct 02 14:23:28 crc kubenswrapper[4717]: [+]process-running ok Oct 02 14:23:28 crc kubenswrapper[4717]: healthz check failed Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.142091 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-l2f4x" podUID="d7c8b49f-9023-4cf9-b276-525da51e498d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.176987 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.200481 4717 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad9781fa-330d-4741-b182-0fdf0d1c394d-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.200507 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgqxl\" (UniqueName: \"kubernetes.io/projected/ad9781fa-330d-4741-b182-0fdf0d1c394d-kube-api-access-rgqxl\") on node \"crc\" DevicePath \"\"" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.200517 4717 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad9781fa-330d-4741-b182-0fdf0d1c394d-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.355021 4717 generic.go:334] "Generic (PLEG): container finished" podID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerID="1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25" exitCode=0 Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.355458 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p84d" event={"ID":"b9b9d93e-5750-4704-a4da-cf78b81b8bd1","Type":"ContainerDied","Data":"1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25"} Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.368589 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2cd92a35-4f37-442d-9228-2401d57d015f","Type":"ContainerStarted","Data":"759f4547de23ff2e1e3131c23aef9e3990144c44e819c34b2e67f737176da9aa"} Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.378209 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ctpb" event={"ID":"43487213-d8bb-45c6-849b-32be6867aa94","Type":"ContainerStarted","Data":"9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52"} Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.378257 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ctpb" event={"ID":"43487213-d8bb-45c6-849b-32be6867aa94","Type":"ContainerStarted","Data":"5075b00f86ec9a6e5bcb1cbfd693b62b86176ac8965590247b5b70531cc9a466"} Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.382520 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcqwp" 
event={"ID":"5f296026-5d53-41bc-95bf-f949f536b981","Type":"ContainerStarted","Data":"fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1"} Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.382547 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcqwp" event={"ID":"5f296026-5d53-41bc-95bf-f949f536b981","Type":"ContainerStarted","Data":"b82fc33cc16adfbd086a036d610790d46e0239870be7e6fc4ae353a9b233cfde"} Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.406478 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.406563 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323575-wfd6r" event={"ID":"ad9781fa-330d-4741-b182-0fdf0d1c394d","Type":"ContainerDied","Data":"0716996e3320282fbc8fed5c582e4de895aa32b8370d7f623e3d504cc1fbb69f"} Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.406593 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0716996e3320282fbc8fed5c582e4de895aa32b8370d7f623e3d504cc1fbb69f" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.657088 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.819631 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kube-api-access\") pod \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\" (UID: \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\") " Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.819821 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kubelet-dir\") pod \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\" (UID: \"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77\") " Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.820281 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77" (UID: "eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.827604 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77" (UID: "eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.922126 4717 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 02 14:23:28 crc kubenswrapper[4717]: I1002 14:23:28.922159 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.141668 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.145310 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-l2f4x" Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.425744 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2cd92a35-4f37-442d-9228-2401d57d015f","Type":"ContainerStarted","Data":"1857b0333beb2f72940bfb2dae142dcf5599524689b00da7f30c768a670230bd"} Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.446034 4717 generic.go:334] "Generic (PLEG): container finished" podID="43487213-d8bb-45c6-849b-32be6867aa94" containerID="9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52" exitCode=0 Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.446107 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ctpb" event={"ID":"43487213-d8bb-45c6-849b-32be6867aa94","Type":"ContainerDied","Data":"9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52"} Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.450742 4717 generic.go:334] "Generic (PLEG): container finished" podID="5f296026-5d53-41bc-95bf-f949f536b981" containerID="fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1" exitCode=0 Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.450800 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcqwp" event={"ID":"5f296026-5d53-41bc-95bf-f949f536b981","Type":"ContainerDied","Data":"fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1"} Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.452097 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.4520745 podStartE2EDuration="2.4520745s" podCreationTimestamp="2025-10-02 14:23:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:23:29.444883946 +0000 UTC m=+160.296738382" watchObservedRunningTime="2025-10-02 14:23:29.4520745 +0000 UTC m=+160.303928946" Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.452955 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.453450 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77","Type":"ContainerDied","Data":"b0889bf4590c9989901ede1dc996cc8da2b256ec6140940ffc709808b2bfdcf1"} Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.453472 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0889bf4590c9989901ede1dc996cc8da2b256ec6140940ffc709808b2bfdcf1" Oct 02 14:23:29 crc kubenswrapper[4717]: I1002 14:23:29.627559 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-xcjdc" Oct 02 14:23:30 crc kubenswrapper[4717]: I1002 14:23:30.465215 4717 generic.go:334] "Generic (PLEG): container finished" podID="2cd92a35-4f37-442d-9228-2401d57d015f" containerID="1857b0333beb2f72940bfb2dae142dcf5599524689b00da7f30c768a670230bd" exitCode=0 Oct 02 14:23:30 crc kubenswrapper[4717]: I1002 14:23:30.465320 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2cd92a35-4f37-442d-9228-2401d57d015f","Type":"ContainerDied","Data":"1857b0333beb2f72940bfb2dae142dcf5599524689b00da7f30c768a670230bd"} Oct 02 14:23:31 crc kubenswrapper[4717]: I1002 14:23:31.836424 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:31 crc kubenswrapper[4717]: I1002 14:23:31.966848 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd92a35-4f37-442d-9228-2401d57d015f-kube-api-access\") pod \"2cd92a35-4f37-442d-9228-2401d57d015f\" (UID: \"2cd92a35-4f37-442d-9228-2401d57d015f\") " Oct 02 14:23:31 crc kubenswrapper[4717]: I1002 14:23:31.966969 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd92a35-4f37-442d-9228-2401d57d015f-kubelet-dir\") pod \"2cd92a35-4f37-442d-9228-2401d57d015f\" (UID: \"2cd92a35-4f37-442d-9228-2401d57d015f\") " Oct 02 14:23:31 crc kubenswrapper[4717]: I1002 14:23:31.967069 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2cd92a35-4f37-442d-9228-2401d57d015f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2cd92a35-4f37-442d-9228-2401d57d015f" (UID: "2cd92a35-4f37-442d-9228-2401d57d015f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:23:31 crc kubenswrapper[4717]: I1002 14:23:31.967324 4717 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2cd92a35-4f37-442d-9228-2401d57d015f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 02 14:23:31 crc kubenswrapper[4717]: I1002 14:23:31.972140 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cd92a35-4f37-442d-9228-2401d57d015f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2cd92a35-4f37-442d-9228-2401d57d015f" (UID: "2cd92a35-4f37-442d-9228-2401d57d015f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:23:32 crc kubenswrapper[4717]: I1002 14:23:32.070502 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2cd92a35-4f37-442d-9228-2401d57d015f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 02 14:23:32 crc kubenswrapper[4717]: I1002 14:23:32.511149 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2cd92a35-4f37-442d-9228-2401d57d015f","Type":"ContainerDied","Data":"759f4547de23ff2e1e3131c23aef9e3990144c44e819c34b2e67f737176da9aa"} Oct 02 14:23:32 crc kubenswrapper[4717]: I1002 14:23:32.511225 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="759f4547de23ff2e1e3131c23aef9e3990144c44e819c34b2e67f737176da9aa" Oct 02 14:23:32 crc kubenswrapper[4717]: I1002 14:23:32.511286 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 02 14:23:33 crc kubenswrapper[4717]: I1002 14:23:33.693526 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:23:33 crc kubenswrapper[4717]: I1002 14:23:33.698685 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/36c8fcb0-a074-461c-a5d1-c01106ee4997-metrics-certs\") pod \"network-metrics-daemon-7v6wt\" (UID: \"36c8fcb0-a074-461c-a5d1-c01106ee4997\") " pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:23:33 crc kubenswrapper[4717]: I1002 14:23:33.859235 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-7v6wt" Oct 02 14:23:37 crc kubenswrapper[4717]: I1002 14:23:37.162873 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:37 crc kubenswrapper[4717]: I1002 14:23:37.172200 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-r4gdr" Oct 02 14:23:37 crc kubenswrapper[4717]: I1002 14:23:37.238111 4717 patch_prober.go:28] interesting pod/downloads-7954f5f757-sf5xk container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 02 14:23:37 crc kubenswrapper[4717]: I1002 14:23:37.238162 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-sf5xk" podUID="41b77629-2976-425f-b71c-a7f2e9686f11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 02 14:23:37 crc kubenswrapper[4717]: I1002 14:23:37.238175 4717 patch_prober.go:28] interesting pod/downloads-7954f5f757-sf5xk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" start-of-body= Oct 02 14:23:37 crc kubenswrapper[4717]: I1002 14:23:37.238251 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-sf5xk" podUID="41b77629-2976-425f-b71c-a7f2e9686f11" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.16:8080/\": dial tcp 10.217.0.16:8080: connect: connection refused" Oct 02 14:23:44 crc kubenswrapper[4717]: I1002 14:23:44.776138 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:23:47 crc kubenswrapper[4717]: I1002 14:23:47.259374 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-sf5xk" Oct 02 14:23:48 crc kubenswrapper[4717]: I1002 14:23:48.620976 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:23:48 crc kubenswrapper[4717]: I1002 14:23:48.621041 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:23:58 crc kubenswrapper[4717]: I1002 14:23:58.038847 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5tf5r" Oct 02 14:23:59 crc kubenswrapper[4717]: I1002 14:23:59.880183 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 02 14:24:03 crc kubenswrapper[4717]: E1002 14:24:03.507500 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = 
copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 02 14:24:03 crc kubenswrapper[4717]: E1002 14:24:03.508040 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f8nhz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-2ctpb_openshift-marketplace(43487213-d8bb-45c6-849b-32be6867aa94): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 14:24:03 crc kubenswrapper[4717]: E1002 14:24:03.509352 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-2ctpb" podUID="43487213-d8bb-45c6-849b-32be6867aa94" Oct 02 14:24:04 crc kubenswrapper[4717]: E1002 14:24:04.696349 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-2ctpb" podUID="43487213-d8bb-45c6-849b-32be6867aa94" Oct 02 14:24:09 crc kubenswrapper[4717]: E1002 14:24:09.323324 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 02 14:24:09 crc kubenswrapper[4717]: E1002 14:24:09.323755 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6lq5n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-wrvgq_openshift-marketplace(5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 14:24:09 crc kubenswrapper[4717]: E1002 14:24:09.324959 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-wrvgq" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" Oct 02 14:24:11 crc kubenswrapper[4717]: E1002 14:24:11.146227 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-wrvgq" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" Oct 02 14:24:11 crc kubenswrapper[4717]: E1002 14:24:11.205839 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 02 14:24:11 crc kubenswrapper[4717]: E1002 14:24:11.206013 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-26cfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-pcqwp_openshift-marketplace(5f296026-5d53-41bc-95bf-f949f536b981): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 14:24:11 crc kubenswrapper[4717]: E1002 14:24:11.207221 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-pcqwp" podUID="5f296026-5d53-41bc-95bf-f949f536b981" Oct 02 14:24:13 crc kubenswrapper[4717]: E1002 14:24:13.880242 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 02 14:24:13 crc kubenswrapper[4717]: E1002 14:24:13.880652 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vzcqd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-8bq8n_openshift-marketplace(11a20c01-b527-4b1f-8b37-cb369059c70d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 14:24:13 crc kubenswrapper[4717]: E1002 14:24:13.881815 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-8bq8n" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" Oct 02 14:24:16 crc kubenswrapper[4717]: E1002 14:24:16.222781 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 02 14:24:16 crc kubenswrapper[4717]: E1002 14:24:16.223060 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tr2sk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pfqfs_openshift-marketplace(9c736886-bb37-4cee-ad32-53a707124bb4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 14:24:16 crc kubenswrapper[4717]: E1002 14:24:16.225237 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-pfqfs" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" Oct 02 14:24:18 crc kubenswrapper[4717]: I1002 14:24:18.620335 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:24:18 crc kubenswrapper[4717]: I1002 14:24:18.620659 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:24:18 crc kubenswrapper[4717]: I1002 14:24:18.620702 4717 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:24:18 crc kubenswrapper[4717]: I1002 14:24:18.621238 4717 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134"} pod="openshift-machine-config-operator/machine-config-daemon-sk55f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 14:24:18 crc kubenswrapper[4717]: I1002 14:24:18.621332 4717 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" containerID="cri-o://848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134" gracePeriod=600 Oct 02 14:24:20 crc kubenswrapper[4717]: E1002 14:24:20.123054 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pfqfs" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" Oct 02 14:24:20 crc kubenswrapper[4717]: E1002 14:24:20.123210 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-8bq8n" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" Oct 02 14:24:20 crc kubenswrapper[4717]: E1002 14:24:20.123609 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-pcqwp" podUID="5f296026-5d53-41bc-95bf-f949f536b981" Oct 02 14:24:20 crc kubenswrapper[4717]: E1002 14:24:20.199345 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 02 14:24:20 crc kubenswrapper[4717]: E1002 14:24:20.199511 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6n6wq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-kgwg8_openshift-marketplace(54c7bec6-8a7a-4c8f-bb92-3280de831120): ErrImagePull: rpc error: code = Canceled desc = copying 
system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 14:24:20 crc kubenswrapper[4717]: E1002 14:24:20.200755 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-kgwg8" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" Oct 02 14:24:20 crc kubenswrapper[4717]: I1002 14:24:20.771011 4717 generic.go:334] "Generic (PLEG): container finished" podID="405aba30-0ff3-4fca-a5da-09c35263665d" containerID="848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134" exitCode=0 Oct 02 14:24:20 crc kubenswrapper[4717]: I1002 14:24:20.771142 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerDied","Data":"848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134"} Oct 02 14:24:25 crc kubenswrapper[4717]: E1002 14:24:25.072514 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-kgwg8" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" Oct 02 14:24:27 crc kubenswrapper[4717]: E1002 14:24:27.120373 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 02 14:24:27 crc kubenswrapper[4717]: E1002 14:24:27.120800 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x2pdb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-x7gpq_openshift-marketplace(cb25e02c-c6e5-4993-8181-ee135ab6f745): 
ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 14:24:27 crc kubenswrapper[4717]: E1002 14:24:27.121898 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-x7gpq" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" Oct 02 14:24:27 crc kubenswrapper[4717]: E1002 14:24:27.121935 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 02 14:24:27 crc kubenswrapper[4717]: E1002 14:24:27.122107 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ck7x4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-8p84d_openshift-marketplace(b9b9d93e-5750-4704-a4da-cf78b81b8bd1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 02 14:24:27 crc kubenswrapper[4717]: E1002 14:24:27.123168 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-8p84d" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" Oct 02 14:24:27 crc kubenswrapper[4717]: I1002 14:24:27.435829 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-7v6wt"] Oct 02 14:24:27 crc kubenswrapper[4717]: W1002 14:24:27.440862 4717 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36c8fcb0_a074_461c_a5d1_c01106ee4997.slice/crio-1a4f839fb94078197038edf6f94fd166cec4c3c12d77a139c52288e0d95f0d64 WatchSource:0}: Error finding container 1a4f839fb94078197038edf6f94fd166cec4c3c12d77a139c52288e0d95f0d64: Status 404 returned error can't find the container with id 1a4f839fb94078197038edf6f94fd166cec4c3c12d77a139c52288e0d95f0d64 Oct 02 14:24:27 crc kubenswrapper[4717]: I1002 14:24:27.827280 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"cf59a8736e6c20aa8e883ecce2166ade5bc372043dd61962e7279881460fe4eb"} Oct 02 14:24:27 crc kubenswrapper[4717]: I1002 14:24:27.832235 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ctpb" event={"ID":"43487213-d8bb-45c6-849b-32be6867aa94","Type":"ContainerDied","Data":"c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646"} Oct 02 14:24:27 crc kubenswrapper[4717]: I1002 14:24:27.832170 4717 generic.go:334] "Generic (PLEG): container finished" podID="43487213-d8bb-45c6-849b-32be6867aa94" containerID="c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646" exitCode=0 Oct 02 14:24:27 crc kubenswrapper[4717]: I1002 14:24:27.840336 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" event={"ID":"36c8fcb0-a074-461c-a5d1-c01106ee4997","Type":"ContainerStarted","Data":"77f152a009c2fc080649489f19f60e1abb784368c76d2ce74592760e0d00e7de"} Oct 02 14:24:27 crc kubenswrapper[4717]: I1002 14:24:27.841368 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" event={"ID":"36c8fcb0-a074-461c-a5d1-c01106ee4997","Type":"ContainerStarted","Data":"1a4f839fb94078197038edf6f94fd166cec4c3c12d77a139c52288e0d95f0d64"} Oct 02 14:24:27 crc kubenswrapper[4717]: E1002 14:24:27.845179 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-8p84d" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" Oct 02 14:24:27 crc kubenswrapper[4717]: E1002 14:24:27.853014 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-x7gpq" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" Oct 02 14:24:28 crc kubenswrapper[4717]: I1002 14:24:28.850123 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ctpb" event={"ID":"43487213-d8bb-45c6-849b-32be6867aa94","Type":"ContainerStarted","Data":"8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593"} Oct 02 14:24:28 crc kubenswrapper[4717]: I1002 14:24:28.852107 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-7v6wt" event={"ID":"36c8fcb0-a074-461c-a5d1-c01106ee4997","Type":"ContainerStarted","Data":"754ad94f3653154578e867afbbef65345c0841221295740969a9b8974266f2fe"} Oct 02 14:24:28 crc kubenswrapper[4717]: I1002 14:24:28.872846 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-2ctpb" podStartSLOduration=1.71682808 podStartE2EDuration="1m1.87279956s" podCreationTimestamp="2025-10-02 14:23:27 +0000 UTC" firstStartedPulling="2025-10-02 14:23:28.379761086 +0000 UTC m=+159.231615532" lastFinishedPulling="2025-10-02 14:24:28.535732566 +0000 UTC m=+219.387587012" observedRunningTime="2025-10-02 14:24:28.868986469 +0000 UTC m=+219.720840915" watchObservedRunningTime="2025-10-02 14:24:28.87279956 +0000 UTC m=+219.724653996" Oct 02 14:24:30 crc kubenswrapper[4717]: I1002 14:24:30.867224 4717 generic.go:334] "Generic (PLEG): container finished" podID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerID="4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec" exitCode=0 Oct 02 14:24:30 crc kubenswrapper[4717]: I1002 14:24:30.867312 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrvgq" event={"ID":"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def","Type":"ContainerDied","Data":"4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec"} Oct 02 14:24:30 crc kubenswrapper[4717]: I1002 14:24:30.885969 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-7v6wt" podStartSLOduration=199.885936036 podStartE2EDuration="3m19.885936036s" podCreationTimestamp="2025-10-02 14:21:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:24:28.886587444 +0000 UTC m=+219.738441890" watchObservedRunningTime="2025-10-02 14:24:30.885936036 +0000 UTC m=+221.737790482" Oct 02 14:24:31 crc kubenswrapper[4717]: I1002 14:24:31.874699 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrvgq" event={"ID":"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def","Type":"ContainerStarted","Data":"75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b"} Oct 02 14:24:31 crc kubenswrapper[4717]: I1002 14:24:31.893553 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wrvgq" podStartSLOduration=2.7980606789999998 podStartE2EDuration="1m8.89353707s" podCreationTimestamp="2025-10-02 14:23:23 +0000 UTC" firstStartedPulling="2025-10-02 14:23:25.250510403 +0000 UTC m=+156.102364859" lastFinishedPulling="2025-10-02 14:24:31.345986804 +0000 UTC m=+222.197841250" observedRunningTime="2025-10-02 14:24:31.89010344 +0000 UTC m=+222.741957886" watchObservedRunningTime="2025-10-02 14:24:31.89353707 +0000 UTC m=+222.745391516" Oct 02 14:24:33 crc kubenswrapper[4717]: I1002 14:24:33.663391 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:24:33 crc kubenswrapper[4717]: I1002 14:24:33.663741 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:24:33 crc kubenswrapper[4717]: I1002 14:24:33.968297 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:24:35 crc kubenswrapper[4717]: I1002 14:24:35.896767 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bq8n" event={"ID":"11a20c01-b527-4b1f-8b37-cb369059c70d","Type":"ContainerStarted","Data":"b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7"} Oct 02 14:24:35 crc kubenswrapper[4717]: I1002 
14:24:35.899576 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfqfs" event={"ID":"9c736886-bb37-4cee-ad32-53a707124bb4","Type":"ContainerStarted","Data":"ab2135f39b74069b751e2b60a342c6da09866c1f5f95296eb5589b8ed46992f2"} Oct 02 14:24:36 crc kubenswrapper[4717]: I1002 14:24:36.906028 4717 generic.go:334] "Generic (PLEG): container finished" podID="9c736886-bb37-4cee-ad32-53a707124bb4" containerID="ab2135f39b74069b751e2b60a342c6da09866c1f5f95296eb5589b8ed46992f2" exitCode=0 Oct 02 14:24:36 crc kubenswrapper[4717]: I1002 14:24:36.906117 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfqfs" event={"ID":"9c736886-bb37-4cee-ad32-53a707124bb4","Type":"ContainerDied","Data":"ab2135f39b74069b751e2b60a342c6da09866c1f5f95296eb5589b8ed46992f2"} Oct 02 14:24:36 crc kubenswrapper[4717]: I1002 14:24:36.908471 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcqwp" event={"ID":"5f296026-5d53-41bc-95bf-f949f536b981","Type":"ContainerStarted","Data":"5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3"} Oct 02 14:24:36 crc kubenswrapper[4717]: I1002 14:24:36.910881 4717 generic.go:334] "Generic (PLEG): container finished" podID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerID="b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7" exitCode=0 Oct 02 14:24:36 crc kubenswrapper[4717]: I1002 14:24:36.910918 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bq8n" event={"ID":"11a20c01-b527-4b1f-8b37-cb369059c70d","Type":"ContainerDied","Data":"b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7"} Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.439864 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.441591 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.497489 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.924394 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bq8n" event={"ID":"11a20c01-b527-4b1f-8b37-cb369059c70d","Type":"ContainerStarted","Data":"5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd"} Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.927337 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfqfs" event={"ID":"9c736886-bb37-4cee-ad32-53a707124bb4","Type":"ContainerStarted","Data":"4303a082fcaa0a45bb47183fb0d9790de2d34ed14438b75be7c255062f3ec2e6"} Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.929048 4717 generic.go:334] "Generic (PLEG): container finished" podID="5f296026-5d53-41bc-95bf-f949f536b981" containerID="5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3" exitCode=0 Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.929142 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcqwp" event={"ID":"5f296026-5d53-41bc-95bf-f949f536b981","Type":"ContainerDied","Data":"5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3"} Oct 02 14:24:37 crc 
kubenswrapper[4717]: I1002 14:24:37.943257 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8bq8n" podStartSLOduration=2.671613229 podStartE2EDuration="1m14.943238449s" podCreationTimestamp="2025-10-02 14:23:23 +0000 UTC" firstStartedPulling="2025-10-02 14:23:25.265628958 +0000 UTC m=+156.117483404" lastFinishedPulling="2025-10-02 14:24:37.537254178 +0000 UTC m=+228.389108624" observedRunningTime="2025-10-02 14:24:37.93950008 +0000 UTC m=+228.791354526" watchObservedRunningTime="2025-10-02 14:24:37.943238449 +0000 UTC m=+228.795092885" Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.970864 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:24:37 crc kubenswrapper[4717]: I1002 14:24:37.983915 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pfqfs" podStartSLOduration=2.842906804 podStartE2EDuration="1m14.983897702s" podCreationTimestamp="2025-10-02 14:23:23 +0000 UTC" firstStartedPulling="2025-10-02 14:23:25.253342799 +0000 UTC m=+156.105197235" lastFinishedPulling="2025-10-02 14:24:37.394333677 +0000 UTC m=+228.246188133" observedRunningTime="2025-10-02 14:24:37.98269422 +0000 UTC m=+228.834548666" watchObservedRunningTime="2025-10-02 14:24:37.983897702 +0000 UTC m=+228.835752148" Oct 02 14:24:39 crc kubenswrapper[4717]: I1002 14:24:39.336641 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2ctpb"] Oct 02 14:24:40 crc kubenswrapper[4717]: I1002 14:24:40.943488 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgwg8" event={"ID":"54c7bec6-8a7a-4c8f-bb92-3280de831120","Type":"ContainerStarted","Data":"8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe"} Oct 02 14:24:40 crc kubenswrapper[4717]: I1002 14:24:40.946021 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2ctpb" podUID="43487213-d8bb-45c6-849b-32be6867aa94" containerName="registry-server" containerID="cri-o://8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593" gracePeriod=2 Oct 02 14:24:40 crc kubenswrapper[4717]: I1002 14:24:40.946111 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcqwp" event={"ID":"5f296026-5d53-41bc-95bf-f949f536b981","Type":"ContainerStarted","Data":"37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7"} Oct 02 14:24:40 crc kubenswrapper[4717]: I1002 14:24:40.983290 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pcqwp" podStartSLOduration=2.687573329 podStartE2EDuration="1m14.983272919s" podCreationTimestamp="2025-10-02 14:23:26 +0000 UTC" firstStartedPulling="2025-10-02 14:23:28.383988299 +0000 UTC m=+159.235842745" lastFinishedPulling="2025-10-02 14:24:40.679687889 +0000 UTC m=+231.531542335" observedRunningTime="2025-10-02 14:24:40.982138589 +0000 UTC m=+231.833993035" watchObservedRunningTime="2025-10-02 14:24:40.983272919 +0000 UTC m=+231.835127365" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.256669 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.432517 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8nhz\" (UniqueName: \"kubernetes.io/projected/43487213-d8bb-45c6-849b-32be6867aa94-kube-api-access-f8nhz\") pod \"43487213-d8bb-45c6-849b-32be6867aa94\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.432588 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-catalog-content\") pod \"43487213-d8bb-45c6-849b-32be6867aa94\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.432621 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-utilities\") pod \"43487213-d8bb-45c6-849b-32be6867aa94\" (UID: \"43487213-d8bb-45c6-849b-32be6867aa94\") " Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.433583 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-utilities" (OuterVolumeSpecName: "utilities") pod "43487213-d8bb-45c6-849b-32be6867aa94" (UID: "43487213-d8bb-45c6-849b-32be6867aa94"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.437873 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43487213-d8bb-45c6-849b-32be6867aa94-kube-api-access-f8nhz" (OuterVolumeSpecName: "kube-api-access-f8nhz") pod "43487213-d8bb-45c6-849b-32be6867aa94" (UID: "43487213-d8bb-45c6-849b-32be6867aa94"). InnerVolumeSpecName "kube-api-access-f8nhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.516600 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43487213-d8bb-45c6-849b-32be6867aa94" (UID: "43487213-d8bb-45c6-849b-32be6867aa94"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.533822 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8nhz\" (UniqueName: \"kubernetes.io/projected/43487213-d8bb-45c6-849b-32be6867aa94-kube-api-access-f8nhz\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.533866 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.533877 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43487213-d8bb-45c6-849b-32be6867aa94-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.953173 4717 generic.go:334] "Generic (PLEG): container finished" podID="43487213-d8bb-45c6-849b-32be6867aa94" containerID="8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593" exitCode=0 Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.953235 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ctpb" event={"ID":"43487213-d8bb-45c6-849b-32be6867aa94","Type":"ContainerDied","Data":"8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593"} Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.953262 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2ctpb" event={"ID":"43487213-d8bb-45c6-849b-32be6867aa94","Type":"ContainerDied","Data":"5075b00f86ec9a6e5bcb1cbfd693b62b86176ac8965590247b5b70531cc9a466"} Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.953278 4717 scope.go:117] "RemoveContainer" containerID="8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.953278 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2ctpb" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.955098 4717 generic.go:334] "Generic (PLEG): container finished" podID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerID="8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe" exitCode=0 Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.955133 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgwg8" event={"ID":"54c7bec6-8a7a-4c8f-bb92-3280de831120","Type":"ContainerDied","Data":"8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe"} Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.958616 4717 generic.go:334] "Generic (PLEG): container finished" podID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerID="4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34" exitCode=0 Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.958644 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p84d" event={"ID":"b9b9d93e-5750-4704-a4da-cf78b81b8bd1","Type":"ContainerDied","Data":"4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34"} Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.973178 4717 scope.go:117] "RemoveContainer" containerID="c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646" Oct 02 14:24:41 crc kubenswrapper[4717]: I1002 14:24:41.991060 4717 scope.go:117] "RemoveContainer" containerID="9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52" Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.006616 4717 scope.go:117] "RemoveContainer" containerID="8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593" Oct 02 14:24:42 crc kubenswrapper[4717]: E1002 14:24:42.007013 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593\": container with ID starting with 8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593 not found: ID does not exist" containerID="8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593" Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.007069 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593"} err="failed to get container status \"8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593\": rpc error: code = NotFound desc = could not find container \"8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593\": container with ID starting with 8e35eb427a841c9a504d0825adcf5d2aaf293afa100e0c501a3addcf85593593 not found: ID does not exist" Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.007096 4717 scope.go:117] "RemoveContainer" containerID="c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646" Oct 02 14:24:42 crc kubenswrapper[4717]: E1002 14:24:42.007370 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646\": container with ID starting with c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646 not found: ID does not exist" containerID="c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646" Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.007411 4717 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646"} err="failed to get container status \"c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646\": rpc error: code = NotFound desc = could not find container \"c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646\": container with ID starting with c3c5f378a0fcbb7db1fdab7f58c77b42003c87bcd1843d0391dd89494a8fb646 not found: ID does not exist" Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.007434 4717 scope.go:117] "RemoveContainer" containerID="9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52" Oct 02 14:24:42 crc kubenswrapper[4717]: E1002 14:24:42.007698 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52\": container with ID starting with 9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52 not found: ID does not exist" containerID="9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52" Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.007726 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52"} err="failed to get container status \"9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52\": rpc error: code = NotFound desc = could not find container \"9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52\": container with ID starting with 9fb31e7233ba4a651c1cc80a9e0986baacceb626c8dc99e41547d60893bdbd52 not found: ID does not exist" Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.020770 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2ctpb"] Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.026805 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2ctpb"] Oct 02 14:24:42 crc kubenswrapper[4717]: I1002 14:24:42.862541 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43487213-d8bb-45c6-849b-32be6867aa94" path="/var/lib/kubelet/pods/43487213-d8bb-45c6-849b-32be6867aa94/volumes" Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.709315 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.862957 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.863384 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.906081 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.972794 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgwg8" event={"ID":"54c7bec6-8a7a-4c8f-bb92-3280de831120","Type":"ContainerStarted","Data":"11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7"} Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.974812 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-8p84d" event={"ID":"b9b9d93e-5750-4704-a4da-cf78b81b8bd1","Type":"ContainerStarted","Data":"c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59"} Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.976532 4717 generic.go:334] "Generic (PLEG): container finished" podID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerID="70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b" exitCode=0 Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.976617 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x7gpq" event={"ID":"cb25e02c-c6e5-4993-8181-ee135ab6f745","Type":"ContainerDied","Data":"70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b"} Oct 02 14:24:43 crc kubenswrapper[4717]: I1002 14:24:43.993395 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kgwg8" podStartSLOduration=3.197575889 podStartE2EDuration="1m20.99337565s" podCreationTimestamp="2025-10-02 14:23:23 +0000 UTC" firstStartedPulling="2025-10-02 14:23:25.24668525 +0000 UTC m=+156.098539716" lastFinishedPulling="2025-10-02 14:24:43.042485031 +0000 UTC m=+233.894339477" observedRunningTime="2025-10-02 14:24:43.992625531 +0000 UTC m=+234.844479987" watchObservedRunningTime="2025-10-02 14:24:43.99337565 +0000 UTC m=+234.845230096" Oct 02 14:24:44 crc kubenswrapper[4717]: I1002 14:24:44.025107 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:24:44 crc kubenswrapper[4717]: I1002 14:24:44.030887 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8p84d" podStartSLOduration=4.280544183 podStartE2EDuration="1m19.030862069s" podCreationTimestamp="2025-10-02 14:23:25 +0000 UTC" firstStartedPulling="2025-10-02 14:23:28.357361286 +0000 UTC m=+159.209215732" lastFinishedPulling="2025-10-02 14:24:43.107679172 +0000 UTC m=+233.959533618" observedRunningTime="2025-10-02 14:24:44.030301025 +0000 UTC m=+234.882155481" watchObservedRunningTime="2025-10-02 14:24:44.030862069 +0000 UTC m=+234.882716525" Oct 02 14:24:44 crc kubenswrapper[4717]: I1002 14:24:44.040242 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:24:44 crc kubenswrapper[4717]: I1002 14:24:44.040299 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:24:44 crc kubenswrapper[4717]: I1002 14:24:44.531502 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:24:44 crc kubenswrapper[4717]: I1002 14:24:44.532089 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:24:44 crc kubenswrapper[4717]: I1002 14:24:44.573817 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:24:44 crc kubenswrapper[4717]: I1002 14:24:44.985873 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x7gpq" event={"ID":"cb25e02c-c6e5-4993-8181-ee135ab6f745","Type":"ContainerStarted","Data":"97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d"} Oct 02 14:24:45 crc kubenswrapper[4717]: I1002 
14:24:45.005651 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x7gpq" podStartSLOduration=2.839070651 podStartE2EDuration="1m20.005631308s" podCreationTimestamp="2025-10-02 14:23:25 +0000 UTC" firstStartedPulling="2025-10-02 14:23:27.311180713 +0000 UTC m=+158.163035159" lastFinishedPulling="2025-10-02 14:24:44.47774137 +0000 UTC m=+235.329595816" observedRunningTime="2025-10-02 14:24:45.005131845 +0000 UTC m=+235.856986311" watchObservedRunningTime="2025-10-02 14:24:45.005631308 +0000 UTC m=+235.857485754" Oct 02 14:24:45 crc kubenswrapper[4717]: I1002 14:24:45.028011 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:24:45 crc kubenswrapper[4717]: I1002 14:24:45.078540 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-kgwg8" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="registry-server" probeResult="failure" output=< Oct 02 14:24:45 crc kubenswrapper[4717]: timeout: failed to connect service ":50051" within 1s Oct 02 14:24:45 crc kubenswrapper[4717]: > Oct 02 14:24:45 crc kubenswrapper[4717]: I1002 14:24:45.952305 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:24:45 crc kubenswrapper[4717]: I1002 14:24:45.952501 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:24:46 crc kubenswrapper[4717]: I1002 14:24:46.008694 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:24:46 crc kubenswrapper[4717]: I1002 14:24:46.044088 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:24:46 crc kubenswrapper[4717]: I1002 14:24:46.044188 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:24:46 crc kubenswrapper[4717]: I1002 14:24:46.090242 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:24:47 crc kubenswrapper[4717]: I1002 14:24:47.151375 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:24:47 crc kubenswrapper[4717]: I1002 14:24:47.151812 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:24:47 crc kubenswrapper[4717]: I1002 14:24:47.212268 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:24:47 crc kubenswrapper[4717]: I1002 14:24:47.730693 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pfqfs"] Oct 02 14:24:48 crc kubenswrapper[4717]: I1002 14:24:48.009215 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pfqfs" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" containerName="registry-server" containerID="cri-o://4303a082fcaa0a45bb47183fb0d9790de2d34ed14438b75be7c255062f3ec2e6" gracePeriod=2 Oct 02 14:24:48 crc kubenswrapper[4717]: I1002 14:24:48.049011 4717 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.016098 4717 generic.go:334] "Generic (PLEG): container finished" podID="9c736886-bb37-4cee-ad32-53a707124bb4" containerID="4303a082fcaa0a45bb47183fb0d9790de2d34ed14438b75be7c255062f3ec2e6" exitCode=0 Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.016218 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfqfs" event={"ID":"9c736886-bb37-4cee-ad32-53a707124bb4","Type":"ContainerDied","Data":"4303a082fcaa0a45bb47183fb0d9790de2d34ed14438b75be7c255062f3ec2e6"} Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.236198 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.331747 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-utilities\") pod \"9c736886-bb37-4cee-ad32-53a707124bb4\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.331836 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tr2sk\" (UniqueName: \"kubernetes.io/projected/9c736886-bb37-4cee-ad32-53a707124bb4-kube-api-access-tr2sk\") pod \"9c736886-bb37-4cee-ad32-53a707124bb4\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.331860 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-catalog-content\") pod \"9c736886-bb37-4cee-ad32-53a707124bb4\" (UID: \"9c736886-bb37-4cee-ad32-53a707124bb4\") " Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.332594 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-utilities" (OuterVolumeSpecName: "utilities") pod "9c736886-bb37-4cee-ad32-53a707124bb4" (UID: "9c736886-bb37-4cee-ad32-53a707124bb4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.337715 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c736886-bb37-4cee-ad32-53a707124bb4-kube-api-access-tr2sk" (OuterVolumeSpecName: "kube-api-access-tr2sk") pod "9c736886-bb37-4cee-ad32-53a707124bb4" (UID: "9c736886-bb37-4cee-ad32-53a707124bb4"). InnerVolumeSpecName "kube-api-access-tr2sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.380062 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c736886-bb37-4cee-ad32-53a707124bb4" (UID: "9c736886-bb37-4cee-ad32-53a707124bb4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.433758 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.433796 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tr2sk\" (UniqueName: \"kubernetes.io/projected/9c736886-bb37-4cee-ad32-53a707124bb4-kube-api-access-tr2sk\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:49 crc kubenswrapper[4717]: I1002 14:24:49.433810 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c736886-bb37-4cee-ad32-53a707124bb4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:50 crc kubenswrapper[4717]: I1002 14:24:50.022407 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pfqfs" event={"ID":"9c736886-bb37-4cee-ad32-53a707124bb4","Type":"ContainerDied","Data":"de6fc435ed79938470eca8f02a2f41f2db5f5afcd4d2b746cdfdb29931235767"} Oct 02 14:24:50 crc kubenswrapper[4717]: I1002 14:24:50.022675 4717 scope.go:117] "RemoveContainer" containerID="4303a082fcaa0a45bb47183fb0d9790de2d34ed14438b75be7c255062f3ec2e6" Oct 02 14:24:50 crc kubenswrapper[4717]: I1002 14:24:50.022470 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pfqfs" Oct 02 14:24:50 crc kubenswrapper[4717]: I1002 14:24:50.037592 4717 scope.go:117] "RemoveContainer" containerID="ab2135f39b74069b751e2b60a342c6da09866c1f5f95296eb5589b8ed46992f2" Oct 02 14:24:50 crc kubenswrapper[4717]: I1002 14:24:50.046869 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pfqfs"] Oct 02 14:24:50 crc kubenswrapper[4717]: I1002 14:24:50.049965 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pfqfs"] Oct 02 14:24:50 crc kubenswrapper[4717]: I1002 14:24:50.081888 4717 scope.go:117] "RemoveContainer" containerID="926eacc00992a3f9615fb612645a84b389b065be3ba3018d65d82a38ccc2a343" Oct 02 14:24:50 crc kubenswrapper[4717]: I1002 14:24:50.846889 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" path="/var/lib/kubelet/pods/9c736886-bb37-4cee-ad32-53a707124bb4/volumes" Oct 02 14:24:54 crc kubenswrapper[4717]: I1002 14:24:54.072975 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:24:54 crc kubenswrapper[4717]: I1002 14:24:54.107983 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:24:55 crc kubenswrapper[4717]: I1002 14:24:55.731435 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kgwg8"] Oct 02 14:24:55 crc kubenswrapper[4717]: I1002 14:24:55.989649 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:24:56 crc kubenswrapper[4717]: I1002 14:24:56.050724 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kgwg8" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="registry-server" 
containerID="cri-o://11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7" gracePeriod=2 Oct 02 14:24:56 crc kubenswrapper[4717]: I1002 14:24:56.081114 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:24:56 crc kubenswrapper[4717]: I1002 14:24:56.989053 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.056971 4717 generic.go:334] "Generic (PLEG): container finished" podID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerID="11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7" exitCode=0 Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.057011 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgwg8" event={"ID":"54c7bec6-8a7a-4c8f-bb92-3280de831120","Type":"ContainerDied","Data":"11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7"} Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.057036 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kgwg8" event={"ID":"54c7bec6-8a7a-4c8f-bb92-3280de831120","Type":"ContainerDied","Data":"8a3a92ca0a6d6249ea5b350c24e23f0c8d1e3d29fa47a623627745ff9e20b478"} Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.057053 4717 scope.go:117] "RemoveContainer" containerID="11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.057144 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kgwg8" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.070672 4717 scope.go:117] "RemoveContainer" containerID="8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.091371 4717 scope.go:117] "RemoveContainer" containerID="f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.102296 4717 scope.go:117] "RemoveContainer" containerID="11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7" Oct 02 14:24:57 crc kubenswrapper[4717]: E1002 14:24:57.102615 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7\": container with ID starting with 11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7 not found: ID does not exist" containerID="11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.102708 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7"} err="failed to get container status \"11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7\": rpc error: code = NotFound desc = could not find container \"11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7\": container with ID starting with 11ff717e8ce6aadf7292d8ebfc53b39806199ff06ce69358afbce3d898cf35c7 not found: ID does not exist" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.102791 4717 scope.go:117] "RemoveContainer" containerID="8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe" Oct 02 
14:24:57 crc kubenswrapper[4717]: E1002 14:24:57.103102 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe\": container with ID starting with 8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe not found: ID does not exist" containerID="8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.103122 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe"} err="failed to get container status \"8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe\": rpc error: code = NotFound desc = could not find container \"8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe\": container with ID starting with 8eb9166596dd6ca364aba7e207f3dd6b54da6f076a80598ba40902c5c5793afe not found: ID does not exist" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.103135 4717 scope.go:117] "RemoveContainer" containerID="f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0" Oct 02 14:24:57 crc kubenswrapper[4717]: E1002 14:24:57.103399 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0\": container with ID starting with f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0 not found: ID does not exist" containerID="f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.103419 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0"} err="failed to get container status \"f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0\": rpc error: code = NotFound desc = could not find container \"f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0\": container with ID starting with f26bcba1e7638c9d208950ae24351cfa7ed77aabc64b880ad2c326e298b615e0 not found: ID does not exist" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.154243 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-catalog-content\") pod \"54c7bec6-8a7a-4c8f-bb92-3280de831120\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.154497 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n6wq\" (UniqueName: \"kubernetes.io/projected/54c7bec6-8a7a-4c8f-bb92-3280de831120-kube-api-access-6n6wq\") pod \"54c7bec6-8a7a-4c8f-bb92-3280de831120\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.154547 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-utilities\") pod \"54c7bec6-8a7a-4c8f-bb92-3280de831120\" (UID: \"54c7bec6-8a7a-4c8f-bb92-3280de831120\") " Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.155428 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-utilities" (OuterVolumeSpecName: "utilities") pod "54c7bec6-8a7a-4c8f-bb92-3280de831120" (UID: "54c7bec6-8a7a-4c8f-bb92-3280de831120"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.159767 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54c7bec6-8a7a-4c8f-bb92-3280de831120-kube-api-access-6n6wq" (OuterVolumeSpecName: "kube-api-access-6n6wq") pod "54c7bec6-8a7a-4c8f-bb92-3280de831120" (UID: "54c7bec6-8a7a-4c8f-bb92-3280de831120"). InnerVolumeSpecName "kube-api-access-6n6wq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.195288 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "54c7bec6-8a7a-4c8f-bb92-3280de831120" (UID: "54c7bec6-8a7a-4c8f-bb92-3280de831120"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.255438 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.255470 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54c7bec6-8a7a-4c8f-bb92-3280de831120-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.255480 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n6wq\" (UniqueName: \"kubernetes.io/projected/54c7bec6-8a7a-4c8f-bb92-3280de831120-kube-api-access-6n6wq\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.388262 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kgwg8"] Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.390718 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kgwg8"] Oct 02 14:24:57 crc kubenswrapper[4717]: I1002 14:24:57.513190 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qmztn"] Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.130076 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p84d"] Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.131100 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8p84d" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerName="registry-server" containerID="cri-o://c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59" gracePeriod=2 Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.435306 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.569198 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck7x4\" (UniqueName: \"kubernetes.io/projected/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-kube-api-access-ck7x4\") pod \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.569246 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-catalog-content\") pod \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.569337 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-utilities\") pod \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\" (UID: \"b9b9d93e-5750-4704-a4da-cf78b81b8bd1\") " Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.570365 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-utilities" (OuterVolumeSpecName: "utilities") pod "b9b9d93e-5750-4704-a4da-cf78b81b8bd1" (UID: "b9b9d93e-5750-4704-a4da-cf78b81b8bd1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.574111 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-kube-api-access-ck7x4" (OuterVolumeSpecName: "kube-api-access-ck7x4") pod "b9b9d93e-5750-4704-a4da-cf78b81b8bd1" (UID: "b9b9d93e-5750-4704-a4da-cf78b81b8bd1"). InnerVolumeSpecName "kube-api-access-ck7x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.584117 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b9b9d93e-5750-4704-a4da-cf78b81b8bd1" (UID: "b9b9d93e-5750-4704-a4da-cf78b81b8bd1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.671149 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.671185 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck7x4\" (UniqueName: \"kubernetes.io/projected/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-kube-api-access-ck7x4\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.671198 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9b9d93e-5750-4704-a4da-cf78b81b8bd1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:24:58 crc kubenswrapper[4717]: I1002 14:24:58.846031 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" path="/var/lib/kubelet/pods/54c7bec6-8a7a-4c8f-bb92-3280de831120/volumes" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.070454 4717 generic.go:334] "Generic (PLEG): container finished" podID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerID="c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59" exitCode=0 Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.070515 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p84d" event={"ID":"b9b9d93e-5750-4704-a4da-cf78b81b8bd1","Type":"ContainerDied","Data":"c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59"} Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.070520 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8p84d" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.070546 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8p84d" event={"ID":"b9b9d93e-5750-4704-a4da-cf78b81b8bd1","Type":"ContainerDied","Data":"2a1282560a1936275ddd1223aba3398636d11838a1bd144910564b3114779eaa"} Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.070567 4717 scope.go:117] "RemoveContainer" containerID="c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.088923 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p84d"] Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.091553 4717 scope.go:117] "RemoveContainer" containerID="4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.095727 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8p84d"] Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.107300 4717 scope.go:117] "RemoveContainer" containerID="1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.121727 4717 scope.go:117] "RemoveContainer" containerID="c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59" Oct 02 14:24:59 crc kubenswrapper[4717]: E1002 14:24:59.122253 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59\": container with ID starting with c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59 not found: ID does not exist" containerID="c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.122304 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59"} err="failed to get container status \"c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59\": rpc error: code = NotFound desc = could not find container \"c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59\": container with ID starting with c70c7b1ddea3bf919f15c862a0e306ee7b782ea6c1c6a1a0e917acbf04e4ec59 not found: ID does not exist" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.122334 4717 scope.go:117] "RemoveContainer" containerID="4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34" Oct 02 14:24:59 crc kubenswrapper[4717]: E1002 14:24:59.122651 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34\": container with ID starting with 4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34 not found: ID does not exist" containerID="4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.122686 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34"} err="failed to get container status \"4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34\": rpc error: code = NotFound desc = could not find 
container \"4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34\": container with ID starting with 4048d2330eaf23f40f072c3db197bfec88c2d6c3dacae89c7cbe291792839b34 not found: ID does not exist" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.122716 4717 scope.go:117] "RemoveContainer" containerID="1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25" Oct 02 14:24:59 crc kubenswrapper[4717]: E1002 14:24:59.123044 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25\": container with ID starting with 1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25 not found: ID does not exist" containerID="1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25" Oct 02 14:24:59 crc kubenswrapper[4717]: I1002 14:24:59.123074 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25"} err="failed to get container status \"1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25\": rpc error: code = NotFound desc = could not find container \"1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25\": container with ID starting with 1f0d55d281a08281c12086eae2162eb935ddf723e6b30612081619cc92394b25 not found: ID does not exist" Oct 02 14:25:00 crc kubenswrapper[4717]: I1002 14:25:00.845322 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" path="/var/lib/kubelet/pods/b9b9d93e-5750-4704-a4da-cf78b81b8bd1/volumes" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.542338 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" podUID="9329366a-9fdd-45dd-9c5d-6139f6cc64c2" containerName="oauth-openshift" containerID="cri-o://24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3" gracePeriod=15 Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.884855 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914491 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7b49777cd7-nvrg4"] Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914677 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerName="extract-content" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914689 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerName="extract-content" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914702 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914708 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914717 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" containerName="extract-utilities" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914722 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" containerName="extract-utilities" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914731 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43487213-d8bb-45c6-849b-32be6867aa94" containerName="extract-utilities" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914738 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="43487213-d8bb-45c6-849b-32be6867aa94" containerName="extract-utilities" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914747 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd92a35-4f37-442d-9228-2401d57d015f" containerName="pruner" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914753 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd92a35-4f37-442d-9228-2401d57d015f" containerName="pruner" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914760 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" containerName="extract-content" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914767 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" containerName="extract-content" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914776 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="extract-utilities" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914781 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="extract-utilities" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914788 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914794 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914801 4717 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerName="extract-utilities" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914807 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerName="extract-utilities" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914816 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914833 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914841 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9781fa-330d-4741-b182-0fdf0d1c394d" containerName="collect-profiles" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914848 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9781fa-330d-4741-b182-0fdf0d1c394d" containerName="collect-profiles" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914854 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43487213-d8bb-45c6-849b-32be6867aa94" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914861 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="43487213-d8bb-45c6-849b-32be6867aa94" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914869 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43487213-d8bb-45c6-849b-32be6867aa94" containerName="extract-content" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914874 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="43487213-d8bb-45c6-849b-32be6867aa94" containerName="extract-content" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914882 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9329366a-9fdd-45dd-9c5d-6139f6cc64c2" containerName="oauth-openshift" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914888 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9329366a-9fdd-45dd-9c5d-6139f6cc64c2" containerName="oauth-openshift" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914895 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="extract-content" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914901 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="extract-content" Oct 02 14:25:22 crc kubenswrapper[4717]: E1002 14:25:22.914908 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77" containerName="pruner" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.914914 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77" containerName="pruner" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915005 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c736886-bb37-4cee-ad32-53a707124bb4" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915030 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="54c7bec6-8a7a-4c8f-bb92-3280de831120" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915039 4717 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="43487213-d8bb-45c6-849b-32be6867aa94" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915049 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb58ea3b-3294-40dc-bc7d-d62a4f2bdb77" containerName="pruner" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915058 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cd92a35-4f37-442d-9228-2401d57d015f" containerName="pruner" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915067 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="9329366a-9fdd-45dd-9c5d-6139f6cc64c2" containerName="oauth-openshift" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915075 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9b9d93e-5750-4704-a4da-cf78b81b8bd1" containerName="registry-server" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915082 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad9781fa-330d-4741-b182-0fdf0d1c394d" containerName="collect-profiles" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.915404 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.937889 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7b49777cd7-nvrg4"] Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.988620 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-login\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.988687 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf6zg\" (UniqueName: \"kubernetes.io/projected/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-kube-api-access-wf6zg\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.988852 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-cliconfig\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.989294 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-dir\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.989383 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.989415 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-router-certs\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.989448 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-policies\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.989483 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-session\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.989504 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-error\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.989700 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.989925 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.990053 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-serving-cert\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.990079 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-ocp-branding-template\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.990550 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-idp-0-file-data\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.990607 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-provider-selection\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.990632 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-service-ca\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.990660 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-trusted-ca-bundle\") pod \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\" (UID: \"9329366a-9fdd-45dd-9c5d-6139f6cc64c2\") " Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.990836 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-error\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.990883 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.991050 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" 
(UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.991122 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.991143 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-session\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.991159 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwwz2\" (UniqueName: \"kubernetes.io/projected/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-kube-api-access-wwwz2\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.991411 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992013 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992083 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992172 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992194 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992239 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-audit-dir\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992263 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-audit-policies\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992298 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-login\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992325 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992412 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992426 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992438 4717 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.992450 4717 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.993348 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.994892 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.995057 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.995423 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.995616 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.996202 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.996459 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:22 crc kubenswrapper[4717]: I1002 14:25:22.996647 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.004113 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-kube-api-access-wf6zg" (OuterVolumeSpecName: "kube-api-access-wf6zg") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "kube-api-access-wf6zg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.004637 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "9329366a-9fdd-45dd-9c5d-6139f6cc64c2" (UID: "9329366a-9fdd-45dd-9c5d-6139f6cc64c2"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.095703 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.095948 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.096073 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.096185 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-audit-dir\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.096239 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-audit-policies\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.096273 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-login\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.096304 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.096329 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-error\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" 
Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.096372 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.097064 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.098660 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-audit-policies\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.099262 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-audit-dir\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.099665 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.099926 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100019 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100052 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-session\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100076 4717 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-wwwz2\" (UniqueName: \"kubernetes.io/projected/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-kube-api-access-wwwz2\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100145 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100652 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100671 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100687 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100696 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100706 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100718 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100731 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf6zg\" (UniqueName: \"kubernetes.io/projected/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-kube-api-access-wf6zg\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100742 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100752 4717 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.100761 4717 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/9329366a-9fdd-45dd-9c5d-6139f6cc64c2-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.102020 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.102670 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.103308 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.104042 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.107771 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.108007 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-error\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.108454 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-user-template-login\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.108642 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.109910 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-v4-0-config-system-session\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.119767 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwwz2\" (UniqueName: \"kubernetes.io/projected/623a5ac9-fadc-444f-a4cd-80a7b0bc68dc-kube-api-access-wwwz2\") pod \"oauth-openshift-7b49777cd7-nvrg4\" (UID: \"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.226265 4717 generic.go:334] "Generic (PLEG): container finished" podID="9329366a-9fdd-45dd-9c5d-6139f6cc64c2" containerID="24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3" exitCode=0 Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.226319 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" event={"ID":"9329366a-9fdd-45dd-9c5d-6139f6cc64c2","Type":"ContainerDied","Data":"24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3"} Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.226357 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.226383 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qmztn" event={"ID":"9329366a-9fdd-45dd-9c5d-6139f6cc64c2","Type":"ContainerDied","Data":"820d6cda793f1830cde4ff089bb8ad30d05546dd9bd4c72deabb8b5f946d70d6"} Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.226408 4717 scope.go:117] "RemoveContainer" containerID="24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.234891 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.256604 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qmztn"] Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.256610 4717 scope.go:117] "RemoveContainer" containerID="24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3" Oct 02 14:25:23 crc kubenswrapper[4717]: E1002 14:25:23.257625 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3\": container with ID starting with 24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3 not found: ID does not exist" containerID="24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.257657 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3"} err="failed to get container status \"24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3\": rpc error: code = NotFound desc = could not find container \"24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3\": container with ID starting with 24eb769bd4bc0cb434b98a4d3b8d8c3b4996616efacb83d58cbcd3b720578db3 not found: ID does not exist" Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.259450 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qmztn"] Oct 02 14:25:23 crc kubenswrapper[4717]: I1002 14:25:23.424511 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7b49777cd7-nvrg4"] Oct 02 14:25:24 crc kubenswrapper[4717]: I1002 14:25:24.233148 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" event={"ID":"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc","Type":"ContainerStarted","Data":"7cb0d541b2f1ee7222bfdb28ff6aa6c878869af2479c5117dad6376c11c36afb"} Oct 02 14:25:24 crc kubenswrapper[4717]: I1002 14:25:24.233530 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" event={"ID":"623a5ac9-fadc-444f-a4cd-80a7b0bc68dc","Type":"ContainerStarted","Data":"5c199ac2f5beebd2ff0a96ba499134d44edc8aace0243a56ed60378e1fb20acb"} Oct 02 14:25:24 crc kubenswrapper[4717]: I1002 14:25:24.233554 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:24 crc kubenswrapper[4717]: I1002 14:25:24.256164 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" podStartSLOduration=27.256147259 podStartE2EDuration="27.256147259s" podCreationTimestamp="2025-10-02 14:24:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:25:24.254624799 +0000 UTC m=+275.106479245" watchObservedRunningTime="2025-10-02 14:25:24.256147259 +0000 UTC m=+275.108001705" Oct 02 14:25:24 crc kubenswrapper[4717]: I1002 14:25:24.748183 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7b49777cd7-nvrg4" Oct 02 14:25:24 crc 
kubenswrapper[4717]: I1002 14:25:24.846569 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9329366a-9fdd-45dd-9c5d-6139f6cc64c2" path="/var/lib/kubelet/pods/9329366a-9fdd-45dd-9c5d-6139f6cc64c2/volumes" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.587216 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wrvgq"] Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.588248 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wrvgq" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerName="registry-server" containerID="cri-o://75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b" gracePeriod=30 Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.601225 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8bq8n"] Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.606896 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8bq8n" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerName="registry-server" containerID="cri-o://5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd" gracePeriod=30 Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.608679 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rncj"] Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.608820 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" podUID="2774fb34-ff9f-408e-a493-f1db8d7d8dc1" containerName="marketplace-operator" containerID="cri-o://becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d" gracePeriod=30 Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.621380 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x7gpq"] Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.621584 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-x7gpq" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerName="registry-server" containerID="cri-o://97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d" gracePeriod=30 Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.628420 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vxzvh"] Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.629025 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.634159 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pcqwp"] Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.634382 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pcqwp" podUID="5f296026-5d53-41bc-95bf-f949f536b981" containerName="registry-server" containerID="cri-o://37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7" gracePeriod=30 Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.648302 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vxzvh"] Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.733647 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83fecab7-2ae7-4bf6-88d5-7233871a02bc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.733751 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrhrj\" (UniqueName: \"kubernetes.io/projected/83fecab7-2ae7-4bf6-88d5-7233871a02bc-kube-api-access-qrhrj\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.733804 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83fecab7-2ae7-4bf6-88d5-7233871a02bc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.834235 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83fecab7-2ae7-4bf6-88d5-7233871a02bc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.834311 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrhrj\" (UniqueName: \"kubernetes.io/projected/83fecab7-2ae7-4bf6-88d5-7233871a02bc-kube-api-access-qrhrj\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.834349 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83fecab7-2ae7-4bf6-88d5-7233871a02bc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.835619 4717 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/83fecab7-2ae7-4bf6-88d5-7233871a02bc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.848600 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/83fecab7-2ae7-4bf6-88d5-7233871a02bc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:42 crc kubenswrapper[4717]: I1002 14:25:42.854976 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrhrj\" (UniqueName: \"kubernetes.io/projected/83fecab7-2ae7-4bf6-88d5-7233871a02bc-kube-api-access-qrhrj\") pod \"marketplace-operator-79b997595-vxzvh\" (UID: \"83fecab7-2ae7-4bf6-88d5-7233871a02bc\") " pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.024529 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.037094 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.043050 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.043174 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.111232 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137018 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-catalog-content\") pod \"5f296026-5d53-41bc-95bf-f949f536b981\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137064 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26cfb\" (UniqueName: \"kubernetes.io/projected/5f296026-5d53-41bc-95bf-f949f536b981-kube-api-access-26cfb\") pod \"5f296026-5d53-41bc-95bf-f949f536b981\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137099 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpk6x\" (UniqueName: \"kubernetes.io/projected/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-kube-api-access-fpk6x\") pod \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137126 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-catalog-content\") pod \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137151 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-utilities\") pod \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137167 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-catalog-content\") pod \"cb25e02c-c6e5-4993-8181-ee135ab6f745\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137201 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-utilities\") pod \"cb25e02c-c6e5-4993-8181-ee135ab6f745\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137218 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2pdb\" (UniqueName: \"kubernetes.io/projected/cb25e02c-c6e5-4993-8181-ee135ab6f745-kube-api-access-x2pdb\") pod \"cb25e02c-c6e5-4993-8181-ee135ab6f745\" (UID: \"cb25e02c-c6e5-4993-8181-ee135ab6f745\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137238 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lq5n\" (UniqueName: \"kubernetes.io/projected/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-kube-api-access-6lq5n\") pod \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\" (UID: \"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137261 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-operator-metrics\") pod \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137280 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-trusted-ca\") pod \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\" (UID: \"2774fb34-ff9f-408e-a493-f1db8d7d8dc1\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.137317 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-utilities\") pod \"5f296026-5d53-41bc-95bf-f949f536b981\" (UID: \"5f296026-5d53-41bc-95bf-f949f536b981\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.138468 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-utilities" (OuterVolumeSpecName: "utilities") pod "5f296026-5d53-41bc-95bf-f949f536b981" (UID: "5f296026-5d53-41bc-95bf-f949f536b981"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.146148 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-utilities" (OuterVolumeSpecName: "utilities") pod "cb25e02c-c6e5-4993-8181-ee135ab6f745" (UID: "cb25e02c-c6e5-4993-8181-ee135ab6f745"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.146898 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-utilities" (OuterVolumeSpecName: "utilities") pod "5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" (UID: "5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.148685 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "2774fb34-ff9f-408e-a493-f1db8d7d8dc1" (UID: "2774fb34-ff9f-408e-a493-f1db8d7d8dc1"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.151817 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-kube-api-access-6lq5n" (OuterVolumeSpecName: "kube-api-access-6lq5n") pod "5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" (UID: "5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def"). InnerVolumeSpecName "kube-api-access-6lq5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.152448 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-kube-api-access-fpk6x" (OuterVolumeSpecName: "kube-api-access-fpk6x") pod "2774fb34-ff9f-408e-a493-f1db8d7d8dc1" (UID: "2774fb34-ff9f-408e-a493-f1db8d7d8dc1"). InnerVolumeSpecName "kube-api-access-fpk6x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.155714 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f296026-5d53-41bc-95bf-f949f536b981-kube-api-access-26cfb" (OuterVolumeSpecName: "kube-api-access-26cfb") pod "5f296026-5d53-41bc-95bf-f949f536b981" (UID: "5f296026-5d53-41bc-95bf-f949f536b981"). InnerVolumeSpecName "kube-api-access-26cfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.156698 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "2774fb34-ff9f-408e-a493-f1db8d7d8dc1" (UID: "2774fb34-ff9f-408e-a493-f1db8d7d8dc1"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.157432 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb25e02c-c6e5-4993-8181-ee135ab6f745" (UID: "cb25e02c-c6e5-4993-8181-ee135ab6f745"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.162207 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb25e02c-c6e5-4993-8181-ee135ab6f745-kube-api-access-x2pdb" (OuterVolumeSpecName: "kube-api-access-x2pdb") pod "cb25e02c-c6e5-4993-8181-ee135ab6f745" (UID: "cb25e02c-c6e5-4993-8181-ee135ab6f745"). InnerVolumeSpecName "kube-api-access-x2pdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.192863 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" (UID: "5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.239907 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f296026-5d53-41bc-95bf-f949f536b981" (UID: "5f296026-5d53-41bc-95bf-f949f536b981"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240387 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lq5n\" (UniqueName: \"kubernetes.io/projected/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-kube-api-access-6lq5n\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240412 4717 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240429 4717 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240443 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240457 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f296026-5d53-41bc-95bf-f949f536b981-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240469 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26cfb\" (UniqueName: \"kubernetes.io/projected/5f296026-5d53-41bc-95bf-f949f536b981-kube-api-access-26cfb\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240482 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpk6x\" (UniqueName: \"kubernetes.io/projected/2774fb34-ff9f-408e-a493-f1db8d7d8dc1-kube-api-access-fpk6x\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240494 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240505 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240517 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240528 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb25e02c-c6e5-4993-8181-ee135ab6f745-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.240541 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2pdb\" (UniqueName: \"kubernetes.io/projected/cb25e02c-c6e5-4993-8181-ee135ab6f745-kube-api-access-x2pdb\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.291121 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-vxzvh"] Oct 02 14:25:43 crc 
kubenswrapper[4717]: W1002 14:25:43.298605 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83fecab7_2ae7_4bf6_88d5_7233871a02bc.slice/crio-fc7a90b86e7eea841039c4e7f72f8365180bb9954c571fcd444268fc147f9d69 WatchSource:0}: Error finding container fc7a90b86e7eea841039c4e7f72f8365180bb9954c571fcd444268fc147f9d69: Status 404 returned error can't find the container with id fc7a90b86e7eea841039c4e7f72f8365180bb9954c571fcd444268fc147f9d69 Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.306392 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.341080 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-utilities\") pod \"11a20c01-b527-4b1f-8b37-cb369059c70d\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.341114 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-catalog-content\") pod \"11a20c01-b527-4b1f-8b37-cb369059c70d\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.341142 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzcqd\" (UniqueName: \"kubernetes.io/projected/11a20c01-b527-4b1f-8b37-cb369059c70d-kube-api-access-vzcqd\") pod \"11a20c01-b527-4b1f-8b37-cb369059c70d\" (UID: \"11a20c01-b527-4b1f-8b37-cb369059c70d\") " Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.342687 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-utilities" (OuterVolumeSpecName: "utilities") pod "11a20c01-b527-4b1f-8b37-cb369059c70d" (UID: "11a20c01-b527-4b1f-8b37-cb369059c70d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.343775 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11a20c01-b527-4b1f-8b37-cb369059c70d-kube-api-access-vzcqd" (OuterVolumeSpecName: "kube-api-access-vzcqd") pod "11a20c01-b527-4b1f-8b37-cb369059c70d" (UID: "11a20c01-b527-4b1f-8b37-cb369059c70d"). InnerVolumeSpecName "kube-api-access-vzcqd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.349276 4717 generic.go:334] "Generic (PLEG): container finished" podID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerID="5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd" exitCode=0 Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.349383 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bq8n" event={"ID":"11a20c01-b527-4b1f-8b37-cb369059c70d","Type":"ContainerDied","Data":"5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.349422 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bq8n" event={"ID":"11a20c01-b527-4b1f-8b37-cb369059c70d","Type":"ContainerDied","Data":"1f7ea01f1413bfd48312ba16bcb64d187ae0ddd19170de9f42b2357588d844a8"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.349450 4717 scope.go:117] "RemoveContainer" containerID="5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.349470 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8bq8n" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.355155 4717 generic.go:334] "Generic (PLEG): container finished" podID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerID="75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b" exitCode=0 Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.355365 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrvgq" event={"ID":"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def","Type":"ContainerDied","Data":"75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.355374 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wrvgq" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.355401 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrvgq" event={"ID":"5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def","Type":"ContainerDied","Data":"74f1cbc49660241b03c15b0ccc9ea905a1e1b2f8379b191df38d54854932766e"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.358802 4717 generic.go:334] "Generic (PLEG): container finished" podID="2774fb34-ff9f-408e-a493-f1db8d7d8dc1" containerID="becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d" exitCode=0 Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.358882 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" event={"ID":"2774fb34-ff9f-408e-a493-f1db8d7d8dc1","Type":"ContainerDied","Data":"becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.358865 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.358991 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8rncj" event={"ID":"2774fb34-ff9f-408e-a493-f1db8d7d8dc1","Type":"ContainerDied","Data":"b6adb91404f75719e0cca4f21c0097f5aa92dc479de56176d85f74a69e3f5c42"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.361622 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" event={"ID":"83fecab7-2ae7-4bf6-88d5-7233871a02bc","Type":"ContainerStarted","Data":"fc7a90b86e7eea841039c4e7f72f8365180bb9954c571fcd444268fc147f9d69"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.368258 4717 generic.go:334] "Generic (PLEG): container finished" podID="5f296026-5d53-41bc-95bf-f949f536b981" containerID="37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7" exitCode=0 Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.368380 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcqwp" event={"ID":"5f296026-5d53-41bc-95bf-f949f536b981","Type":"ContainerDied","Data":"37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.368416 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcqwp" event={"ID":"5f296026-5d53-41bc-95bf-f949f536b981","Type":"ContainerDied","Data":"b82fc33cc16adfbd086a036d610790d46e0239870be7e6fc4ae353a9b233cfde"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.368451 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pcqwp" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.370693 4717 generic.go:334] "Generic (PLEG): container finished" podID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerID="97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d" exitCode=0 Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.370739 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x7gpq" event={"ID":"cb25e02c-c6e5-4993-8181-ee135ab6f745","Type":"ContainerDied","Data":"97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.370775 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x7gpq" event={"ID":"cb25e02c-c6e5-4993-8181-ee135ab6f745","Type":"ContainerDied","Data":"4facf1f8c8d24bd533dccd36a933f7eff9417137214a9bb25117e3cb96186818"} Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.370797 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x7gpq" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.398847 4717 scope.go:117] "RemoveContainer" containerID="b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.408366 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rncj"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.415155 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rncj"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.420595 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wrvgq"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.433989 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wrvgq"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.436702 4717 scope.go:117] "RemoveContainer" containerID="1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.439368 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pcqwp"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.442394 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.442459 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzcqd\" (UniqueName: \"kubernetes.io/projected/11a20c01-b527-4b1f-8b37-cb369059c70d-kube-api-access-vzcqd\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.442691 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pcqwp"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.449454 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x7gpq"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.452286 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-x7gpq"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.460973 4717 scope.go:117] "RemoveContainer" containerID="5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.461432 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd\": container with ID starting with 5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd not found: ID does not exist" containerID="5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.461474 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd"} err="failed to get container status \"5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd\": rpc error: code = NotFound desc = could not find container \"5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd\": container with ID starting with 
5208e519242912d0b578467b0f3c8e526e403970db00d5edc4f04ea5c886ffdd not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.461499 4717 scope.go:117] "RemoveContainer" containerID="b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.461915 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7\": container with ID starting with b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7 not found: ID does not exist" containerID="b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.461971 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7"} err="failed to get container status \"b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7\": rpc error: code = NotFound desc = could not find container \"b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7\": container with ID starting with b1956f4780d4f6303505fcd23fc27bc8ec3fe5bd2925271d27a3b7d9de89f9c7 not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.461999 4717 scope.go:117] "RemoveContainer" containerID="1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.462314 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee\": container with ID starting with 1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee not found: ID does not exist" containerID="1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.462342 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee"} err="failed to get container status \"1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee\": rpc error: code = NotFound desc = could not find container \"1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee\": container with ID starting with 1b8944f32b2fd63b2b37bb136aec97f8476524f5c41043c621969c66ba80d5ee not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.462359 4717 scope.go:117] "RemoveContainer" containerID="75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.472852 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11a20c01-b527-4b1f-8b37-cb369059c70d" (UID: "11a20c01-b527-4b1f-8b37-cb369059c70d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.478733 4717 scope.go:117] "RemoveContainer" containerID="4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.491128 4717 scope.go:117] "RemoveContainer" containerID="e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.504824 4717 scope.go:117] "RemoveContainer" containerID="75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.505206 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b\": container with ID starting with 75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b not found: ID does not exist" containerID="75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.505241 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b"} err="failed to get container status \"75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b\": rpc error: code = NotFound desc = could not find container \"75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b\": container with ID starting with 75d38815c314e14b92746dec9321b17c0ffa430d06cc07028c1c00db9c80ce5b not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.505270 4717 scope.go:117] "RemoveContainer" containerID="4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.505601 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec\": container with ID starting with 4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec not found: ID does not exist" containerID="4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.505630 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec"} err="failed to get container status \"4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec\": rpc error: code = NotFound desc = could not find container \"4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec\": container with ID starting with 4f899a2d73549c2eba667719e882acdac1519ed89b23f0d842c1f2ca993866ec not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.505648 4717 scope.go:117] "RemoveContainer" containerID="e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.505855 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692\": container with ID starting with e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692 not found: ID does not exist" containerID="e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692" Oct 02 14:25:43 crc 
kubenswrapper[4717]: I1002 14:25:43.505884 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692"} err="failed to get container status \"e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692\": rpc error: code = NotFound desc = could not find container \"e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692\": container with ID starting with e420bdffdf6ad420be6b2889fafb94f2dfe8b80a6b10a48b7f47b434c6f4c692 not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.505901 4717 scope.go:117] "RemoveContainer" containerID="becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.517637 4717 scope.go:117] "RemoveContainer" containerID="becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.518056 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d\": container with ID starting with becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d not found: ID does not exist" containerID="becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.518199 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d"} err="failed to get container status \"becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d\": rpc error: code = NotFound desc = could not find container \"becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d\": container with ID starting with becfb47ef60223d33c84d0073af833acb9509e1809766de76cb839271515677d not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.518472 4717 scope.go:117] "RemoveContainer" containerID="37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.531749 4717 scope.go:117] "RemoveContainer" containerID="5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.543498 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11a20c01-b527-4b1f-8b37-cb369059c70d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.548407 4717 scope.go:117] "RemoveContainer" containerID="fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.564261 4717 scope.go:117] "RemoveContainer" containerID="37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.564836 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7\": container with ID starting with 37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7 not found: ID does not exist" containerID="37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.564886 4717 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7"} err="failed to get container status \"37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7\": rpc error: code = NotFound desc = could not find container \"37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7\": container with ID starting with 37eb5d1186d5e8036c96c128e6e35b4a58d3015e3173a81e2ac1e878307ac5c7 not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.564918 4717 scope.go:117] "RemoveContainer" containerID="5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.565909 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3\": container with ID starting with 5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3 not found: ID does not exist" containerID="5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.565970 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3"} err="failed to get container status \"5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3\": rpc error: code = NotFound desc = could not find container \"5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3\": container with ID starting with 5d44ce3b0ae8d863ba8ab865a2065711fb0497de17d21f92a42844db2ad706f3 not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.565998 4717 scope.go:117] "RemoveContainer" containerID="fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.566315 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1\": container with ID starting with fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1 not found: ID does not exist" containerID="fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.566337 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1"} err="failed to get container status \"fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1\": rpc error: code = NotFound desc = could not find container \"fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1\": container with ID starting with fa3db573dcb50488665d837c5ab3f740faacf323698c4f9f8c8fd36df00c41b1 not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.566352 4717 scope.go:117] "RemoveContainer" containerID="97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.581115 4717 scope.go:117] "RemoveContainer" containerID="70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.596644 4717 scope.go:117] "RemoveContainer" containerID="905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.610644 
4717 scope.go:117] "RemoveContainer" containerID="97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.611164 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d\": container with ID starting with 97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d not found: ID does not exist" containerID="97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.611208 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d"} err="failed to get container status \"97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d\": rpc error: code = NotFound desc = could not find container \"97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d\": container with ID starting with 97333ac84664c4be4188694ffcec47641a8e0cc0a29b78c12e3c9ecef502243d not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.611236 4717 scope.go:117] "RemoveContainer" containerID="70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.611672 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b\": container with ID starting with 70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b not found: ID does not exist" containerID="70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.611706 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b"} err="failed to get container status \"70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b\": rpc error: code = NotFound desc = could not find container \"70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b\": container with ID starting with 70e4b388eb99e7f6e315d29ba8c7f77d4cc29d8323094f524bfae6928af2bd1b not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.611720 4717 scope.go:117] "RemoveContainer" containerID="905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0" Oct 02 14:25:43 crc kubenswrapper[4717]: E1002 14:25:43.611962 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0\": container with ID starting with 905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0 not found: ID does not exist" containerID="905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.611983 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0"} err="failed to get container status \"905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0\": rpc error: code = NotFound desc = could not find container \"905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0\": container with ID starting with 
905aa1ccd7e3f5b99c1fe8a4662baddef018bf59ca3382dacbe40500259db1c0 not found: ID does not exist" Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.673236 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8bq8n"] Oct 02 14:25:43 crc kubenswrapper[4717]: I1002 14:25:43.680405 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8bq8n"] Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.377465 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" event={"ID":"83fecab7-2ae7-4bf6-88d5-7233871a02bc","Type":"ContainerStarted","Data":"3ac5c0d3c3a180526746914bdd6ecb8d30e8e229c160499f1fd2da2fcf573b2b"} Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.377552 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.384654 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.404210 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-vxzvh" podStartSLOduration=2.40418642 podStartE2EDuration="2.40418642s" podCreationTimestamp="2025-10-02 14:25:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:25:44.393907901 +0000 UTC m=+295.245762347" watchObservedRunningTime="2025-10-02 14:25:44.40418642 +0000 UTC m=+295.256040866" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804416 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rdghh"] Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804601 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804612 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804621 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerName="extract-utilities" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804627 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerName="extract-utilities" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804635 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerName="extract-content" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804642 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerName="extract-content" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804648 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerName="extract-utilities" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804654 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerName="extract-utilities" Oct 02 14:25:44 crc 
kubenswrapper[4717]: E1002 14:25:44.804663 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804668 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804679 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerName="extract-content" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804685 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerName="extract-content" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804695 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerName="extract-content" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804700 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerName="extract-content" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804712 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerName="extract-utilities" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804719 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerName="extract-utilities" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804725 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f296026-5d53-41bc-95bf-f949f536b981" containerName="extract-utilities" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804730 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f296026-5d53-41bc-95bf-f949f536b981" containerName="extract-utilities" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804737 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804743 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804749 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f296026-5d53-41bc-95bf-f949f536b981" containerName="extract-content" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804754 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f296026-5d53-41bc-95bf-f949f536b981" containerName="extract-content" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804760 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f296026-5d53-41bc-95bf-f949f536b981" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.804765 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f296026-5d53-41bc-95bf-f949f536b981" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: E1002 14:25:44.804774 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2774fb34-ff9f-408e-a493-f1db8d7d8dc1" containerName="marketplace-operator" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.805253 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="2774fb34-ff9f-408e-a493-f1db8d7d8dc1" 
containerName="marketplace-operator" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.805343 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="2774fb34-ff9f-408e-a493-f1db8d7d8dc1" containerName="marketplace-operator" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.805358 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.805365 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f296026-5d53-41bc-95bf-f949f536b981" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.805374 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.805384 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" containerName="registry-server" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.806011 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.807947 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.815548 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rdghh"] Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.844714 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11a20c01-b527-4b1f-8b37-cb369059c70d" path="/var/lib/kubelet/pods/11a20c01-b527-4b1f-8b37-cb369059c70d/volumes" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.845849 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2774fb34-ff9f-408e-a493-f1db8d7d8dc1" path="/var/lib/kubelet/pods/2774fb34-ff9f-408e-a493-f1db8d7d8dc1/volumes" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.846515 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def" path="/var/lib/kubelet/pods/5c5ebf2c-a6f4-476f-b0dd-3b9fd0310def/volumes" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.847795 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f296026-5d53-41bc-95bf-f949f536b981" path="/var/lib/kubelet/pods/5f296026-5d53-41bc-95bf-f949f536b981/volumes" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.848609 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb25e02c-c6e5-4993-8181-ee135ab6f745" path="/var/lib/kubelet/pods/cb25e02c-c6e5-4993-8181-ee135ab6f745/volumes" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.867645 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9960bc8-e961-4f52-bb65-5b59f0858a5c-utilities\") pod \"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.867854 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9960bc8-e961-4f52-bb65-5b59f0858a5c-catalog-content\") pod 
\"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:44 crc kubenswrapper[4717]: I1002 14:25:44.868125 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgfjm\" (UniqueName: \"kubernetes.io/projected/f9960bc8-e961-4f52-bb65-5b59f0858a5c-kube-api-access-jgfjm\") pod \"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.011068 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9960bc8-e961-4f52-bb65-5b59f0858a5c-catalog-content\") pod \"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.011130 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgfjm\" (UniqueName: \"kubernetes.io/projected/f9960bc8-e961-4f52-bb65-5b59f0858a5c-kube-api-access-jgfjm\") pod \"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.011163 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9960bc8-e961-4f52-bb65-5b59f0858a5c-utilities\") pod \"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.011827 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9960bc8-e961-4f52-bb65-5b59f0858a5c-utilities\") pod \"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.011912 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9960bc8-e961-4f52-bb65-5b59f0858a5c-catalog-content\") pod \"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.020010 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-87wh7"] Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.021022 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.023049 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.027616 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-87wh7"] Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.037997 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgfjm\" (UniqueName: \"kubernetes.io/projected/f9960bc8-e961-4f52-bb65-5b59f0858a5c-kube-api-access-jgfjm\") pod \"certified-operators-rdghh\" (UID: \"f9960bc8-e961-4f52-bb65-5b59f0858a5c\") " pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.112168 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-catalog-content\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.112213 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dqbn\" (UniqueName: \"kubernetes.io/projected/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-kube-api-access-4dqbn\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.112238 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-utilities\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.126918 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.212826 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-catalog-content\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.213150 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dqbn\" (UniqueName: \"kubernetes.io/projected/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-kube-api-access-4dqbn\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.213176 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-utilities\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.213624 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-utilities\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.213683 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-catalog-content\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.230668 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dqbn\" (UniqueName: \"kubernetes.io/projected/f44d4e10-b4a4-4d65-8c76-7907c8a5d882-kube-api-access-4dqbn\") pod \"community-operators-87wh7\" (UID: \"f44d4e10-b4a4-4d65-8c76-7907c8a5d882\") " pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.305426 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rdghh"] Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.335801 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.395508 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rdghh" event={"ID":"f9960bc8-e961-4f52-bb65-5b59f0858a5c","Type":"ContainerStarted","Data":"f0179cb9fb0134eec948a36a53512a72f092c2997e376a7d9ecbcebc2d671484"} Oct 02 14:25:45 crc kubenswrapper[4717]: I1002 14:25:45.507907 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-87wh7"] Oct 02 14:25:45 crc kubenswrapper[4717]: W1002 14:25:45.538707 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf44d4e10_b4a4_4d65_8c76_7907c8a5d882.slice/crio-c98e2c22ecd686cd4ae282b141d256691a0797d02d4232ec26fecd85a23f2f3c WatchSource:0}: Error finding container c98e2c22ecd686cd4ae282b141d256691a0797d02d4232ec26fecd85a23f2f3c: Status 404 returned error can't find the container with id c98e2c22ecd686cd4ae282b141d256691a0797d02d4232ec26fecd85a23f2f3c Oct 02 14:25:46 crc kubenswrapper[4717]: I1002 14:25:46.401665 4717 generic.go:334] "Generic (PLEG): container finished" podID="f9960bc8-e961-4f52-bb65-5b59f0858a5c" containerID="cacf5120a8818f191ddfb208e0fde3281ed0005664160e86f932aa7094fa6da3" exitCode=0 Oct 02 14:25:46 crc kubenswrapper[4717]: I1002 14:25:46.401724 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rdghh" event={"ID":"f9960bc8-e961-4f52-bb65-5b59f0858a5c","Type":"ContainerDied","Data":"cacf5120a8818f191ddfb208e0fde3281ed0005664160e86f932aa7094fa6da3"} Oct 02 14:25:46 crc kubenswrapper[4717]: I1002 14:25:46.403156 4717 generic.go:334] "Generic (PLEG): container finished" podID="f44d4e10-b4a4-4d65-8c76-7907c8a5d882" containerID="e31fcf0871ef2e003616c97307e22e967b862444acac59a43abba45b8b6085a3" exitCode=0 Oct 02 14:25:46 crc kubenswrapper[4717]: I1002 14:25:46.403231 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-87wh7" event={"ID":"f44d4e10-b4a4-4d65-8c76-7907c8a5d882","Type":"ContainerDied","Data":"e31fcf0871ef2e003616c97307e22e967b862444acac59a43abba45b8b6085a3"} Oct 02 14:25:46 crc kubenswrapper[4717]: I1002 14:25:46.403270 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-87wh7" event={"ID":"f44d4e10-b4a4-4d65-8c76-7907c8a5d882","Type":"ContainerStarted","Data":"c98e2c22ecd686cd4ae282b141d256691a0797d02d4232ec26fecd85a23f2f3c"} Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.210106 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cf592"] Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.211112 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.215439 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.218630 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cf592"] Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.342854 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwvq8\" (UniqueName: \"kubernetes.io/projected/942f28a4-5dee-444a-8528-1794e832be15-kube-api-access-kwvq8\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.343207 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/942f28a4-5dee-444a-8528-1794e832be15-utilities\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.343341 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/942f28a4-5dee-444a-8528-1794e832be15-catalog-content\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.403135 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2wl69"] Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.404138 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.407664 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.414851 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2wl69"] Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.416674 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-87wh7" event={"ID":"f44d4e10-b4a4-4d65-8c76-7907c8a5d882","Type":"ContainerStarted","Data":"947dcf9bc3689a8b4f5b48bafaf6d64e40e9279677d67fc72a5fd10f9ccff254"} Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.444672 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwvq8\" (UniqueName: \"kubernetes.io/projected/942f28a4-5dee-444a-8528-1794e832be15-kube-api-access-kwvq8\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.444716 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/942f28a4-5dee-444a-8528-1794e832be15-utilities\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.444770 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/942f28a4-5dee-444a-8528-1794e832be15-catalog-content\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.445259 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/942f28a4-5dee-444a-8528-1794e832be15-catalog-content\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.445554 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/942f28a4-5dee-444a-8528-1794e832be15-utilities\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.464614 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwvq8\" (UniqueName: \"kubernetes.io/projected/942f28a4-5dee-444a-8528-1794e832be15-kube-api-access-kwvq8\") pod \"redhat-marketplace-cf592\" (UID: \"942f28a4-5dee-444a-8528-1794e832be15\") " pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.546335 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t85dn\" (UniqueName: \"kubernetes.io/projected/8f70b22a-c207-48df-90fc-ea87f232da17-kube-api-access-t85dn\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc 
kubenswrapper[4717]: I1002 14:25:47.546390 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f70b22a-c207-48df-90fc-ea87f232da17-utilities\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.546560 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f70b22a-c207-48df-90fc-ea87f232da17-catalog-content\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.595540 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.647981 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t85dn\" (UniqueName: \"kubernetes.io/projected/8f70b22a-c207-48df-90fc-ea87f232da17-kube-api-access-t85dn\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.648429 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f70b22a-c207-48df-90fc-ea87f232da17-utilities\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.648472 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f70b22a-c207-48df-90fc-ea87f232da17-catalog-content\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.648841 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f70b22a-c207-48df-90fc-ea87f232da17-catalog-content\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.649079 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f70b22a-c207-48df-90fc-ea87f232da17-utilities\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.665123 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t85dn\" (UniqueName: \"kubernetes.io/projected/8f70b22a-c207-48df-90fc-ea87f232da17-kube-api-access-t85dn\") pod \"redhat-operators-2wl69\" (UID: \"8f70b22a-c207-48df-90fc-ea87f232da17\") " pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.724564 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.784879 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cf592"] Oct 02 14:25:47 crc kubenswrapper[4717]: W1002 14:25:47.789483 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod942f28a4_5dee_444a_8528_1794e832be15.slice/crio-f19e48fdc83073b27d59eea8d009446977fe6bad83b7fe112ec616002a245d6a WatchSource:0}: Error finding container f19e48fdc83073b27d59eea8d009446977fe6bad83b7fe112ec616002a245d6a: Status 404 returned error can't find the container with id f19e48fdc83073b27d59eea8d009446977fe6bad83b7fe112ec616002a245d6a Oct 02 14:25:47 crc kubenswrapper[4717]: I1002 14:25:47.899531 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2wl69"] Oct 02 14:25:47 crc kubenswrapper[4717]: W1002 14:25:47.906841 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f70b22a_c207_48df_90fc_ea87f232da17.slice/crio-66261556839c000825a279cf095a00e8e7cd94cc2fba9c50a053c4ed6987ba82 WatchSource:0}: Error finding container 66261556839c000825a279cf095a00e8e7cd94cc2fba9c50a053c4ed6987ba82: Status 404 returned error can't find the container with id 66261556839c000825a279cf095a00e8e7cd94cc2fba9c50a053c4ed6987ba82 Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.423000 4717 generic.go:334] "Generic (PLEG): container finished" podID="f9960bc8-e961-4f52-bb65-5b59f0858a5c" containerID="d4187221bbb1f6432edbc443b145498eb60d75345173efd356ae25e0e69a6ebc" exitCode=0 Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.423983 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rdghh" event={"ID":"f9960bc8-e961-4f52-bb65-5b59f0858a5c","Type":"ContainerDied","Data":"d4187221bbb1f6432edbc443b145498eb60d75345173efd356ae25e0e69a6ebc"} Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.425170 4717 generic.go:334] "Generic (PLEG): container finished" podID="942f28a4-5dee-444a-8528-1794e832be15" containerID="9d99881aa0f8100731bf4d26c0c713a4f95aee3b0e384efd61081d009874c659" exitCode=0 Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.425459 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf592" event={"ID":"942f28a4-5dee-444a-8528-1794e832be15","Type":"ContainerDied","Data":"9d99881aa0f8100731bf4d26c0c713a4f95aee3b0e384efd61081d009874c659"} Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.425523 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf592" event={"ID":"942f28a4-5dee-444a-8528-1794e832be15","Type":"ContainerStarted","Data":"f19e48fdc83073b27d59eea8d009446977fe6bad83b7fe112ec616002a245d6a"} Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.428369 4717 generic.go:334] "Generic (PLEG): container finished" podID="8f70b22a-c207-48df-90fc-ea87f232da17" containerID="6b53ddc03de6e32b7173a5ec27be2d3778c8a877a43616c1390dcb7f472d830f" exitCode=0 Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.428481 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wl69" event={"ID":"8f70b22a-c207-48df-90fc-ea87f232da17","Type":"ContainerDied","Data":"6b53ddc03de6e32b7173a5ec27be2d3778c8a877a43616c1390dcb7f472d830f"} Oct 02 
14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.428668 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wl69" event={"ID":"8f70b22a-c207-48df-90fc-ea87f232da17","Type":"ContainerStarted","Data":"66261556839c000825a279cf095a00e8e7cd94cc2fba9c50a053c4ed6987ba82"} Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.430783 4717 generic.go:334] "Generic (PLEG): container finished" podID="f44d4e10-b4a4-4d65-8c76-7907c8a5d882" containerID="947dcf9bc3689a8b4f5b48bafaf6d64e40e9279677d67fc72a5fd10f9ccff254" exitCode=0 Oct 02 14:25:48 crc kubenswrapper[4717]: I1002 14:25:48.430806 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-87wh7" event={"ID":"f44d4e10-b4a4-4d65-8c76-7907c8a5d882","Type":"ContainerDied","Data":"947dcf9bc3689a8b4f5b48bafaf6d64e40e9279677d67fc72a5fd10f9ccff254"} Oct 02 14:25:49 crc kubenswrapper[4717]: I1002 14:25:49.440132 4717 generic.go:334] "Generic (PLEG): container finished" podID="942f28a4-5dee-444a-8528-1794e832be15" containerID="443d020f2e77867c91aefde1decd56d99a51ffb5b8a3368e8b933287cc2118c8" exitCode=0 Oct 02 14:25:49 crc kubenswrapper[4717]: I1002 14:25:49.440335 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf592" event={"ID":"942f28a4-5dee-444a-8528-1794e832be15","Type":"ContainerDied","Data":"443d020f2e77867c91aefde1decd56d99a51ffb5b8a3368e8b933287cc2118c8"} Oct 02 14:25:49 crc kubenswrapper[4717]: I1002 14:25:49.443662 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wl69" event={"ID":"8f70b22a-c207-48df-90fc-ea87f232da17","Type":"ContainerStarted","Data":"da3d01ad697a7dbd9205bc07ae902bcc3ebc2f32f8fa72d9ca4122975c10a3a9"} Oct 02 14:25:49 crc kubenswrapper[4717]: I1002 14:25:49.449226 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-87wh7" event={"ID":"f44d4e10-b4a4-4d65-8c76-7907c8a5d882","Type":"ContainerStarted","Data":"33093e606619efa459cc1d0e5871063760353251e90f5f3b47ad109938d9ba0b"} Oct 02 14:25:49 crc kubenswrapper[4717]: I1002 14:25:49.451527 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rdghh" event={"ID":"f9960bc8-e961-4f52-bb65-5b59f0858a5c","Type":"ContainerStarted","Data":"43c1a27d7c63077446995c50bbb9e9c6c69b10f565533246319d45e44c07f53c"} Oct 02 14:25:49 crc kubenswrapper[4717]: I1002 14:25:49.483730 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rdghh" podStartSLOduration=2.952236331 podStartE2EDuration="5.483711148s" podCreationTimestamp="2025-10-02 14:25:44 +0000 UTC" firstStartedPulling="2025-10-02 14:25:46.404406812 +0000 UTC m=+297.256261258" lastFinishedPulling="2025-10-02 14:25:48.935881629 +0000 UTC m=+299.787736075" observedRunningTime="2025-10-02 14:25:49.481232392 +0000 UTC m=+300.333086848" watchObservedRunningTime="2025-10-02 14:25:49.483711148 +0000 UTC m=+300.335565594" Oct 02 14:25:49 crc kubenswrapper[4717]: I1002 14:25:49.500285 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-87wh7" podStartSLOduration=2.849210224 podStartE2EDuration="5.500261031s" podCreationTimestamp="2025-10-02 14:25:44 +0000 UTC" firstStartedPulling="2025-10-02 14:25:46.405179462 +0000 UTC m=+297.257033908" lastFinishedPulling="2025-10-02 14:25:49.056230269 +0000 UTC m=+299.908084715" 
observedRunningTime="2025-10-02 14:25:49.500117427 +0000 UTC m=+300.351971893" watchObservedRunningTime="2025-10-02 14:25:49.500261031 +0000 UTC m=+300.352115477" Oct 02 14:25:50 crc kubenswrapper[4717]: I1002 14:25:50.458544 4717 generic.go:334] "Generic (PLEG): container finished" podID="8f70b22a-c207-48df-90fc-ea87f232da17" containerID="da3d01ad697a7dbd9205bc07ae902bcc3ebc2f32f8fa72d9ca4122975c10a3a9" exitCode=0 Oct 02 14:25:50 crc kubenswrapper[4717]: I1002 14:25:50.458636 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wl69" event={"ID":"8f70b22a-c207-48df-90fc-ea87f232da17","Type":"ContainerDied","Data":"da3d01ad697a7dbd9205bc07ae902bcc3ebc2f32f8fa72d9ca4122975c10a3a9"} Oct 02 14:25:51 crc kubenswrapper[4717]: I1002 14:25:51.465206 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cf592" event={"ID":"942f28a4-5dee-444a-8528-1794e832be15","Type":"ContainerStarted","Data":"dd461bb2b768dd4891901df801cb02b4768c954e89edcbfad78c9ec68abfb1ac"} Oct 02 14:25:51 crc kubenswrapper[4717]: I1002 14:25:51.466692 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2wl69" event={"ID":"8f70b22a-c207-48df-90fc-ea87f232da17","Type":"ContainerStarted","Data":"ea6a8a3fbc6b2e149b96439256480a336a79c2d8e64efd9879bc5aab9e79b201"} Oct 02 14:25:51 crc kubenswrapper[4717]: I1002 14:25:51.483427 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cf592" podStartSLOduration=2.9799722859999997 podStartE2EDuration="4.483411896s" podCreationTimestamp="2025-10-02 14:25:47 +0000 UTC" firstStartedPulling="2025-10-02 14:25:48.42621738 +0000 UTC m=+299.278071826" lastFinishedPulling="2025-10-02 14:25:49.92965699 +0000 UTC m=+300.781511436" observedRunningTime="2025-10-02 14:25:51.482608496 +0000 UTC m=+302.334462952" watchObservedRunningTime="2025-10-02 14:25:51.483411896 +0000 UTC m=+302.335266342" Oct 02 14:25:51 crc kubenswrapper[4717]: I1002 14:25:51.501370 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2wl69" podStartSLOduration=1.942097891 podStartE2EDuration="4.501350546s" podCreationTimestamp="2025-10-02 14:25:47 +0000 UTC" firstStartedPulling="2025-10-02 14:25:48.429622248 +0000 UTC m=+299.281476694" lastFinishedPulling="2025-10-02 14:25:50.988874903 +0000 UTC m=+301.840729349" observedRunningTime="2025-10-02 14:25:51.499443716 +0000 UTC m=+302.351298162" watchObservedRunningTime="2025-10-02 14:25:51.501350546 +0000 UTC m=+302.353204992" Oct 02 14:25:55 crc kubenswrapper[4717]: I1002 14:25:55.127501 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:55 crc kubenswrapper[4717]: I1002 14:25:55.127921 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:55 crc kubenswrapper[4717]: I1002 14:25:55.170315 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:55 crc kubenswrapper[4717]: I1002 14:25:55.336164 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:55 crc kubenswrapper[4717]: I1002 14:25:55.336201 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:55 crc kubenswrapper[4717]: I1002 14:25:55.374255 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:55 crc kubenswrapper[4717]: I1002 14:25:55.522576 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rdghh" Oct 02 14:25:55 crc kubenswrapper[4717]: I1002 14:25:55.523528 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-87wh7" Oct 02 14:25:57 crc kubenswrapper[4717]: I1002 14:25:57.595719 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:57 crc kubenswrapper[4717]: I1002 14:25:57.595768 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:57 crc kubenswrapper[4717]: I1002 14:25:57.631731 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:57 crc kubenswrapper[4717]: I1002 14:25:57.725240 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:57 crc kubenswrapper[4717]: I1002 14:25:57.725283 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:57 crc kubenswrapper[4717]: I1002 14:25:57.763265 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:25:58 crc kubenswrapper[4717]: I1002 14:25:58.538733 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cf592" Oct 02 14:25:58 crc kubenswrapper[4717]: I1002 14:25:58.540439 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2wl69" Oct 02 14:26:48 crc kubenswrapper[4717]: I1002 14:26:48.620604 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:26:48 crc kubenswrapper[4717]: I1002 14:26:48.621179 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:27:18 crc kubenswrapper[4717]: I1002 14:27:18.620685 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:27:18 crc kubenswrapper[4717]: I1002 14:27:18.621286 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:27:48 crc kubenswrapper[4717]: I1002 14:27:48.620417 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:27:48 crc kubenswrapper[4717]: I1002 14:27:48.620872 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:27:48 crc kubenswrapper[4717]: I1002 14:27:48.620914 4717 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:27:48 crc kubenswrapper[4717]: I1002 14:27:48.621463 4717 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cf59a8736e6c20aa8e883ecce2166ade5bc372043dd61962e7279881460fe4eb"} pod="openshift-machine-config-operator/machine-config-daemon-sk55f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 14:27:48 crc kubenswrapper[4717]: I1002 14:27:48.621520 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" containerID="cri-o://cf59a8736e6c20aa8e883ecce2166ade5bc372043dd61962e7279881460fe4eb" gracePeriod=600 Oct 02 14:27:49 crc kubenswrapper[4717]: I1002 14:27:49.085834 4717 generic.go:334] "Generic (PLEG): container finished" podID="405aba30-0ff3-4fca-a5da-09c35263665d" containerID="cf59a8736e6c20aa8e883ecce2166ade5bc372043dd61962e7279881460fe4eb" exitCode=0 Oct 02 14:27:49 crc kubenswrapper[4717]: I1002 14:27:49.085923 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerDied","Data":"cf59a8736e6c20aa8e883ecce2166ade5bc372043dd61962e7279881460fe4eb"} Oct 02 14:27:49 crc kubenswrapper[4717]: I1002 14:27:49.086135 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"0e3040e8a9e53a53556b70fb7f1b6868f918c9f54853ff50605f472c544d4044"} Oct 02 14:27:49 crc kubenswrapper[4717]: I1002 14:27:49.086162 4717 scope.go:117] "RemoveContainer" containerID="848c8649df6859afc8e921e677ccf05455f8f819711078022fa1f6d6d503e134" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.415588 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xmn9b"] Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.417167 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.437032 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xmn9b"] Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.598616 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52e2370e-5db6-4806-b1fb-056bd6f55324-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.598674 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.598718 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-bound-sa-token\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.598741 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-registry-tls\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.598769 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz59b\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-kube-api-access-jz59b\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.598787 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52e2370e-5db6-4806-b1fb-056bd6f55324-registry-certificates\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.598876 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52e2370e-5db6-4806-b1fb-056bd6f55324-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.598948 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/52e2370e-5db6-4806-b1fb-056bd6f55324-trusted-ca\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.640659 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.700358 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz59b\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-kube-api-access-jz59b\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.700463 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52e2370e-5db6-4806-b1fb-056bd6f55324-registry-certificates\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.700495 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52e2370e-5db6-4806-b1fb-056bd6f55324-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.700528 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52e2370e-5db6-4806-b1fb-056bd6f55324-trusted-ca\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.700565 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52e2370e-5db6-4806-b1fb-056bd6f55324-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.700840 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-bound-sa-token\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.700881 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-registry-tls\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.701235 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52e2370e-5db6-4806-b1fb-056bd6f55324-ca-trust-extracted\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.701906 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52e2370e-5db6-4806-b1fb-056bd6f55324-trusted-ca\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.702404 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52e2370e-5db6-4806-b1fb-056bd6f55324-registry-certificates\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.709557 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-registry-tls\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.709578 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52e2370e-5db6-4806-b1fb-056bd6f55324-installation-pull-secrets\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.717594 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-bound-sa-token\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.718266 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz59b\" (UniqueName: \"kubernetes.io/projected/52e2370e-5db6-4806-b1fb-056bd6f55324-kube-api-access-jz59b\") pod \"image-registry-66df7c8f76-xmn9b\" (UID: \"52e2370e-5db6-4806-b1fb-056bd6f55324\") " pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.789333 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:15 crc kubenswrapper[4717]: I1002 14:28:15.969674 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-xmn9b"] Oct 02 14:28:16 crc kubenswrapper[4717]: I1002 14:28:16.224677 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" event={"ID":"52e2370e-5db6-4806-b1fb-056bd6f55324","Type":"ContainerStarted","Data":"198f74829b0a3e19447759ba920bb87f49dec566947464d2c0adf18efc4a0da5"} Oct 02 14:28:17 crc kubenswrapper[4717]: I1002 14:28:17.232651 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" event={"ID":"52e2370e-5db6-4806-b1fb-056bd6f55324","Type":"ContainerStarted","Data":"402172b77b6319dbda2d3a80dc097af75cc1f60b2a6ba3cbc1b558280ba5b6e6"} Oct 02 14:28:17 crc kubenswrapper[4717]: I1002 14:28:17.232771 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:17 crc kubenswrapper[4717]: I1002 14:28:17.252276 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" podStartSLOduration=2.252243472 podStartE2EDuration="2.252243472s" podCreationTimestamp="2025-10-02 14:28:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:28:17.252056397 +0000 UTC m=+448.103910873" watchObservedRunningTime="2025-10-02 14:28:17.252243472 +0000 UTC m=+448.104097918" Oct 02 14:28:35 crc kubenswrapper[4717]: I1002 14:28:35.795573 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-xmn9b" Oct 02 14:28:35 crc kubenswrapper[4717]: I1002 14:28:35.862591 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkhsn"] Oct 02 14:29:00 crc kubenswrapper[4717]: I1002 14:29:00.904853 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" podUID="61253815-47e3-4e2c-a2e7-565f128dedef" containerName="registry" containerID="cri-o://1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638" gracePeriod=30 Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.211541 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.322775 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/61253815-47e3-4e2c-a2e7-565f128dedef-installation-pull-secrets\") pod \"61253815-47e3-4e2c-a2e7-565f128dedef\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.323045 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"61253815-47e3-4e2c-a2e7-565f128dedef\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.323081 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/61253815-47e3-4e2c-a2e7-565f128dedef-ca-trust-extracted\") pod \"61253815-47e3-4e2c-a2e7-565f128dedef\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.323102 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-registry-tls\") pod \"61253815-47e3-4e2c-a2e7-565f128dedef\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.323176 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-bound-sa-token\") pod \"61253815-47e3-4e2c-a2e7-565f128dedef\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.323205 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxwn5\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-kube-api-access-zxwn5\") pod \"61253815-47e3-4e2c-a2e7-565f128dedef\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.323869 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-trusted-ca\") pod \"61253815-47e3-4e2c-a2e7-565f128dedef\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.323951 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-registry-certificates\") pod \"61253815-47e3-4e2c-a2e7-565f128dedef\" (UID: \"61253815-47e3-4e2c-a2e7-565f128dedef\") " Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.324451 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "61253815-47e3-4e2c-a2e7-565f128dedef" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.324541 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "61253815-47e3-4e2c-a2e7-565f128dedef" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.333621 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "61253815-47e3-4e2c-a2e7-565f128dedef" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.341245 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-kube-api-access-zxwn5" (OuterVolumeSpecName: "kube-api-access-zxwn5") pod "61253815-47e3-4e2c-a2e7-565f128dedef" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef"). InnerVolumeSpecName "kube-api-access-zxwn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.341308 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61253815-47e3-4e2c-a2e7-565f128dedef-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "61253815-47e3-4e2c-a2e7-565f128dedef" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.341400 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "61253815-47e3-4e2c-a2e7-565f128dedef" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.342307 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "61253815-47e3-4e2c-a2e7-565f128dedef" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.345292 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61253815-47e3-4e2c-a2e7-565f128dedef-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "61253815-47e3-4e2c-a2e7-565f128dedef" (UID: "61253815-47e3-4e2c-a2e7-565f128dedef"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.425782 4717 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.426140 4717 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/61253815-47e3-4e2c-a2e7-565f128dedef-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.426153 4717 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/61253815-47e3-4e2c-a2e7-565f128dedef-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.426162 4717 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.426170 4717 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.426178 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxwn5\" (UniqueName: \"kubernetes.io/projected/61253815-47e3-4e2c-a2e7-565f128dedef-kube-api-access-zxwn5\") on node \"crc\" DevicePath \"\"" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.426186 4717 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61253815-47e3-4e2c-a2e7-565f128dedef-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.480332 4717 generic.go:334] "Generic (PLEG): container finished" podID="61253815-47e3-4e2c-a2e7-565f128dedef" containerID="1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638" exitCode=0 Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.480378 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.480375 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" event={"ID":"61253815-47e3-4e2c-a2e7-565f128dedef","Type":"ContainerDied","Data":"1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638"} Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.480507 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pkhsn" event={"ID":"61253815-47e3-4e2c-a2e7-565f128dedef","Type":"ContainerDied","Data":"4f64c35a169e5643f0a5a82c2dc9a8d09f83ce4932f7abf016ef471976508d6b"} Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.480535 4717 scope.go:117] "RemoveContainer" containerID="1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.494234 4717 scope.go:117] "RemoveContainer" containerID="1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638" Oct 02 14:29:01 crc kubenswrapper[4717]: E1002 14:29:01.494744 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638\": container with ID starting with 1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638 not found: ID does not exist" containerID="1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.494791 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638"} err="failed to get container status \"1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638\": rpc error: code = NotFound desc = could not find container \"1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638\": container with ID starting with 1f8bd7640ac3602e8feb9a7f5c5b7b116f7f4b98d37176cde2d480b820ca8638 not found: ID does not exist" Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.514180 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkhsn"] Oct 02 14:29:01 crc kubenswrapper[4717]: I1002 14:29:01.518982 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkhsn"] Oct 02 14:29:02 crc kubenswrapper[4717]: I1002 14:29:02.845478 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61253815-47e3-4e2c-a2e7-565f128dedef" path="/var/lib/kubelet/pods/61253815-47e3-4e2c-a2e7-565f128dedef/volumes" Oct 02 14:29:48 crc kubenswrapper[4717]: I1002 14:29:48.621102 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:29:48 crc kubenswrapper[4717]: I1002 14:29:48.621917 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:30:00 crc 
kubenswrapper[4717]: I1002 14:30:00.144385 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24"] Oct 02 14:30:00 crc kubenswrapper[4717]: E1002 14:30:00.145551 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61253815-47e3-4e2c-a2e7-565f128dedef" containerName="registry" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.145578 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="61253815-47e3-4e2c-a2e7-565f128dedef" containerName="registry" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.145749 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="61253815-47e3-4e2c-a2e7-565f128dedef" containerName="registry" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.146996 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.152920 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.153411 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.160886 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24"] Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.258204 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t64k7\" (UniqueName: \"kubernetes.io/projected/e1783d69-2645-4d2d-83ae-7bf4baca44f5-kube-api-access-t64k7\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.258278 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1783d69-2645-4d2d-83ae-7bf4baca44f5-config-volume\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.258353 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1783d69-2645-4d2d-83ae-7bf4baca44f5-secret-volume\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.359476 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t64k7\" (UniqueName: \"kubernetes.io/projected/e1783d69-2645-4d2d-83ae-7bf4baca44f5-kube-api-access-t64k7\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.359543 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/e1783d69-2645-4d2d-83ae-7bf4baca44f5-config-volume\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.359583 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1783d69-2645-4d2d-83ae-7bf4baca44f5-secret-volume\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.360644 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1783d69-2645-4d2d-83ae-7bf4baca44f5-config-volume\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.366071 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1783d69-2645-4d2d-83ae-7bf4baca44f5-secret-volume\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.375099 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t64k7\" (UniqueName: \"kubernetes.io/projected/e1783d69-2645-4d2d-83ae-7bf4baca44f5-kube-api-access-t64k7\") pod \"collect-profiles-29323590-dwq24\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.468810 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:00 crc kubenswrapper[4717]: I1002 14:30:00.866442 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24"] Oct 02 14:30:01 crc kubenswrapper[4717]: I1002 14:30:01.843972 4717 generic.go:334] "Generic (PLEG): container finished" podID="e1783d69-2645-4d2d-83ae-7bf4baca44f5" containerID="41cf9a4e489374d84283c468cd63cdceef184ec2dd1bf82b1e56551215be7b0f" exitCode=0 Oct 02 14:30:01 crc kubenswrapper[4717]: I1002 14:30:01.844079 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" event={"ID":"e1783d69-2645-4d2d-83ae-7bf4baca44f5","Type":"ContainerDied","Data":"41cf9a4e489374d84283c468cd63cdceef184ec2dd1bf82b1e56551215be7b0f"} Oct 02 14:30:01 crc kubenswrapper[4717]: I1002 14:30:01.844309 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" event={"ID":"e1783d69-2645-4d2d-83ae-7bf4baca44f5","Type":"ContainerStarted","Data":"b3446b2a6a5dae52ac1f174f133fe8fcc8e62d73fc5ab3eff10e6a4598fdddb3"} Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.023864 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.193611 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1783d69-2645-4d2d-83ae-7bf4baca44f5-config-volume\") pod \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.193725 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t64k7\" (UniqueName: \"kubernetes.io/projected/e1783d69-2645-4d2d-83ae-7bf4baca44f5-kube-api-access-t64k7\") pod \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.193774 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1783d69-2645-4d2d-83ae-7bf4baca44f5-secret-volume\") pod \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\" (UID: \"e1783d69-2645-4d2d-83ae-7bf4baca44f5\") " Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.194422 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1783d69-2645-4d2d-83ae-7bf4baca44f5-config-volume" (OuterVolumeSpecName: "config-volume") pod "e1783d69-2645-4d2d-83ae-7bf4baca44f5" (UID: "e1783d69-2645-4d2d-83ae-7bf4baca44f5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.198322 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1783d69-2645-4d2d-83ae-7bf4baca44f5-kube-api-access-t64k7" (OuterVolumeSpecName: "kube-api-access-t64k7") pod "e1783d69-2645-4d2d-83ae-7bf4baca44f5" (UID: "e1783d69-2645-4d2d-83ae-7bf4baca44f5"). InnerVolumeSpecName "kube-api-access-t64k7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.199131 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1783d69-2645-4d2d-83ae-7bf4baca44f5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e1783d69-2645-4d2d-83ae-7bf4baca44f5" (UID: "e1783d69-2645-4d2d-83ae-7bf4baca44f5"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.294733 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t64k7\" (UniqueName: \"kubernetes.io/projected/e1783d69-2645-4d2d-83ae-7bf4baca44f5-kube-api-access-t64k7\") on node \"crc\" DevicePath \"\"" Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.294774 4717 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1783d69-2645-4d2d-83ae-7bf4baca44f5-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.294788 4717 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1783d69-2645-4d2d-83ae-7bf4baca44f5-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.854083 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" event={"ID":"e1783d69-2645-4d2d-83ae-7bf4baca44f5","Type":"ContainerDied","Data":"b3446b2a6a5dae52ac1f174f133fe8fcc8e62d73fc5ab3eff10e6a4598fdddb3"} Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.854125 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3446b2a6a5dae52ac1f174f133fe8fcc8e62d73fc5ab3eff10e6a4598fdddb3" Oct 02 14:30:03 crc kubenswrapper[4717]: I1002 14:30:03.854138 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323590-dwq24" Oct 02 14:30:18 crc kubenswrapper[4717]: I1002 14:30:18.620407 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:30:18 crc kubenswrapper[4717]: I1002 14:30:18.621066 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:30:48 crc kubenswrapper[4717]: I1002 14:30:48.620564 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:30:48 crc kubenswrapper[4717]: I1002 14:30:48.621342 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:30:48 crc kubenswrapper[4717]: I1002 14:30:48.621410 4717 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:30:48 crc kubenswrapper[4717]: I1002 14:30:48.622446 4717 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"0e3040e8a9e53a53556b70fb7f1b6868f918c9f54853ff50605f472c544d4044"} pod="openshift-machine-config-operator/machine-config-daemon-sk55f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 14:30:48 crc kubenswrapper[4717]: I1002 14:30:48.622629 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" containerID="cri-o://0e3040e8a9e53a53556b70fb7f1b6868f918c9f54853ff50605f472c544d4044" gracePeriod=600 Oct 02 14:30:49 crc kubenswrapper[4717]: I1002 14:30:49.137502 4717 generic.go:334] "Generic (PLEG): container finished" podID="405aba30-0ff3-4fca-a5da-09c35263665d" containerID="0e3040e8a9e53a53556b70fb7f1b6868f918c9f54853ff50605f472c544d4044" exitCode=0 Oct 02 14:30:49 crc kubenswrapper[4717]: I1002 14:30:49.137709 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerDied","Data":"0e3040e8a9e53a53556b70fb7f1b6868f918c9f54853ff50605f472c544d4044"} Oct 02 14:30:49 crc kubenswrapper[4717]: I1002 14:30:49.137953 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"70c25bc08fc344bcf8bb59d3376cd774b978162bd5593d10e8b7b82c8502396b"} Oct 02 14:30:49 crc kubenswrapper[4717]: I1002 14:30:49.137979 4717 scope.go:117] "RemoveContainer" containerID="cf59a8736e6c20aa8e883ecce2166ade5bc372043dd61962e7279881460fe4eb" Oct 02 14:31:04 crc kubenswrapper[4717]: I1002 14:31:04.939133 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l7nn7"] Oct 02 14:31:04 crc kubenswrapper[4717]: I1002 14:31:04.940006 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovn-controller" containerID="cri-o://7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec" gracePeriod=30 Oct 02 14:31:04 crc kubenswrapper[4717]: I1002 14:31:04.940327 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="sbdb" containerID="cri-o://99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e" gracePeriod=30 Oct 02 14:31:04 crc kubenswrapper[4717]: I1002 14:31:04.940360 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="nbdb" containerID="cri-o://65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733" gracePeriod=30 Oct 02 14:31:04 crc kubenswrapper[4717]: I1002 14:31:04.940391 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="northd" containerID="cri-o://e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d" gracePeriod=30 Oct 02 14:31:04 crc kubenswrapper[4717]: I1002 14:31:04.940419 4717 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f" gracePeriod=30 Oct 02 14:31:04 crc kubenswrapper[4717]: I1002 14:31:04.940444 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kube-rbac-proxy-node" containerID="cri-o://c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f" gracePeriod=30 Oct 02 14:31:04 crc kubenswrapper[4717]: I1002 14:31:04.940468 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovn-acl-logging" containerID="cri-o://9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f" gracePeriod=30 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.003394 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" containerID="cri-o://f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46" gracePeriod=30 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.223274 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovnkube-controller/3.log" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.234595 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovn-acl-logging/0.log" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.235550 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovn-controller/0.log" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.235959 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46" exitCode=0 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.235987 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733" exitCode=0 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.235995 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f" exitCode=0 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236002 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f" exitCode=0 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236008 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f" exitCode=143 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236014 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" 
containerID="7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec" exitCode=143 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236032 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46"} Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236097 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733"} Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236113 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f"} Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236133 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f"} Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236145 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f"} Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236157 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec"} Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.236140 4717 scope.go:117] "RemoveContainer" containerID="c7aaad1c29238337a8190062214270d0463f8398e13b7ca42eb3ce1d48cfdfa9" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.237908 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/2.log" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.238431 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/1.log" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.238478 4717 generic.go:334] "Generic (PLEG): container finished" podID="424c679b-8db0-4ba4-9c8f-67a65fe38048" containerID="3527acb10ee45ae3438864810d930ced0270423b8149f122db3ebeef60142df5" exitCode=2 Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.238518 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s7n7q" event={"ID":"424c679b-8db0-4ba4-9c8f-67a65fe38048","Type":"ContainerDied","Data":"3527acb10ee45ae3438864810d930ced0270423b8149f122db3ebeef60142df5"} Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.239045 4717 scope.go:117] "RemoveContainer" containerID="3527acb10ee45ae3438864810d930ced0270423b8149f122db3ebeef60142df5" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.239322 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s 
restarting failed container=kube-multus pod=multus-s7n7q_openshift-multus(424c679b-8db0-4ba4-9c8f-67a65fe38048)\"" pod="openshift-multus/multus-s7n7q" podUID="424c679b-8db0-4ba4-9c8f-67a65fe38048" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.285683 4717 scope.go:117] "RemoveContainer" containerID="65654cd04f6a13fb96357e48bcf4bfbc1ec4d67a433638da5395a038605acf8d" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.304847 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovn-acl-logging/0.log" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.305392 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovn-controller/0.log" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.305784 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.353684 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-nrrgp"] Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.353985 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.353999 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354008 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1783d69-2645-4d2d-83ae-7bf4baca44f5" containerName="collect-profiles" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354014 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1783d69-2645-4d2d-83ae-7bf4baca44f5" containerName="collect-profiles" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354022 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354030 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354040 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovn-acl-logging" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354047 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovn-acl-logging" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354061 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kube-rbac-proxy-node" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354068 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kube-rbac-proxy-node" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354077 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354086 4717 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354095 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354102 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354109 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kube-rbac-proxy-ovn-metrics" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354115 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kube-rbac-proxy-ovn-metrics" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354139 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="sbdb" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354145 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="sbdb" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354154 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="northd" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354162 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="northd" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354169 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="nbdb" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354174 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="nbdb" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354184 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kubecfg-setup" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354190 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kubecfg-setup" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354198 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovn-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354204 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovn-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354323 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kube-rbac-proxy-node" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354339 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354349 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="sbdb" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354359 4717 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="nbdb" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354369 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="kube-rbac-proxy-ovn-metrics" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354377 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovn-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354384 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovn-acl-logging" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354391 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1783d69-2645-4d2d-83ae-7bf4baca44f5" containerName="collect-profiles" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354398 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="northd" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354406 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354412 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354418 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: E1002 14:31:05.354499 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354506 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.354596 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" containerName="ovnkube-controller" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.356809 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.445687 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-script-lib\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446039 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w94qv\" (UniqueName: \"kubernetes.io/projected/4de64e15-550a-4404-92fc-b355535a4bf2-kube-api-access-w94qv\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446071 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-systemd-units\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446094 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-netns\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446123 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-bin\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446128 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446181 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446183 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-slash\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446231 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-log-socket\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446232 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-slash" (OuterVolumeSpecName: "host-slash") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446252 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-config\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446236 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446271 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-var-lib-openvswitch\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446290 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-systemd\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446316 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-ovn-kubernetes\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446268 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446343 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-netd\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446289 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-log-socket" (OuterVolumeSpecName: "log-socket") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446313 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446363 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-ovn\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446380 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446401 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446417 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-env-overrides\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446464 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-node-log\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446494 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446519 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4de64e15-550a-4404-92fc-b355535a4bf2-ovn-node-metrics-cert\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446539 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-kubelet\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446576 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-etc-openvswitch\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446595 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-openvswitch\") pod \"4de64e15-550a-4404-92fc-b355535a4bf2\" (UID: \"4de64e15-550a-4404-92fc-b355535a4bf2\") " Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446423 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446637 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446663 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446715 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-node-log" (OuterVolumeSpecName: "node-log") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446690 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446741 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446746 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446803 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446836 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446837 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-run-ovn-kubernetes\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446877 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovn-node-metrics-cert\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446908 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-systemd-units\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.446974 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-ovn\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447020 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-env-overrides\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447104 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-kubelet\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447128 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-slash\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447187 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-var-lib-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447222 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-log-socket\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447241 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovnkube-config\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447267 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-run-netns\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447291 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-systemd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447329 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovnkube-script-lib\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447365 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447407 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-node-log\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447432 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-cni-netd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447454 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmzwd\" (UniqueName: \"kubernetes.io/projected/b6f07bec-6fc6-4a37-b5af-843cd773809f-kube-api-access-nmzwd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447487 4717 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-cni-bin\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447515 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-etc-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447579 4717 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-slash\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447594 4717 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-log-socket\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447605 4717 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447617 4717 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447628 4717 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447640 4717 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447651 4717 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447662 4717 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447672 4717 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-node-log\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447684 4717 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447696 4717 reconciler_common.go:293] "Volume 
detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447708 4717 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447721 4717 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447733 4717 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4de64e15-550a-4404-92fc-b355535a4bf2-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447745 4717 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447756 4717 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.447775 4717 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.452441 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4de64e15-550a-4404-92fc-b355535a4bf2-kube-api-access-w94qv" (OuterVolumeSpecName: "kube-api-access-w94qv") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "kube-api-access-w94qv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.454039 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4de64e15-550a-4404-92fc-b355535a4bf2-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.462912 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "4de64e15-550a-4404-92fc-b355535a4bf2" (UID: "4de64e15-550a-4404-92fc-b355535a4bf2"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549402 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-node-log\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549452 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-cni-netd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549508 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmzwd\" (UniqueName: \"kubernetes.io/projected/b6f07bec-6fc6-4a37-b5af-843cd773809f-kube-api-access-nmzwd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549535 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-cni-bin\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549553 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-etc-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549554 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-node-log\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549592 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-cni-netd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549616 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549572 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549647 4717 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-run-ovn-kubernetes\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549644 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-cni-bin\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549664 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovn-node-metrics-cert\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549682 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-systemd-units\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549624 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-etc-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549706 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-ovn\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549711 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-run-ovn-kubernetes\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549752 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-systemd-units\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549784 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-env-overrides\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549811 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-ovn\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549816 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-kubelet\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549833 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-kubelet\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549853 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-slash\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549885 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-var-lib-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549911 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-slash\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549962 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-var-lib-openvswitch\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.549917 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-log-socket\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550022 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovnkube-config\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550038 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-log-socket\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550041 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-run-netns\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550061 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-run-netns\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550080 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-systemd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550122 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovnkube-script-lib\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550149 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550168 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-run-systemd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550219 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w94qv\" (UniqueName: \"kubernetes.io/projected/4de64e15-550a-4404-92fc-b355535a4bf2-kube-api-access-w94qv\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550232 4717 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4de64e15-550a-4404-92fc-b355535a4bf2-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550242 4717 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4de64e15-550a-4404-92fc-b355535a4bf2-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550264 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b6f07bec-6fc6-4a37-b5af-843cd773809f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nrrgp\" (UID: 
\"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550439 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-env-overrides\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550595 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovnkube-config\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.550919 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovnkube-script-lib\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.554269 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b6f07bec-6fc6-4a37-b5af-843cd773809f-ovn-node-metrics-cert\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.564030 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmzwd\" (UniqueName: \"kubernetes.io/projected/b6f07bec-6fc6-4a37-b5af-843cd773809f-kube-api-access-nmzwd\") pod \"ovnkube-node-nrrgp\" (UID: \"b6f07bec-6fc6-4a37-b5af-843cd773809f\") " pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:05 crc kubenswrapper[4717]: I1002 14:31:05.672851 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.247644 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovn-acl-logging/0.log" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.248544 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-l7nn7_4de64e15-550a-4404-92fc-b355535a4bf2/ovn-controller/0.log" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.248868 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e" exitCode=0 Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.248886 4717 generic.go:334] "Generic (PLEG): container finished" podID="4de64e15-550a-4404-92fc-b355535a4bf2" containerID="e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d" exitCode=0 Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.248974 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.248972 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e"} Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.249073 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d"} Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.249087 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l7nn7" event={"ID":"4de64e15-550a-4404-92fc-b355535a4bf2","Type":"ContainerDied","Data":"dd54b5ec48286ed15d1a837e7d32fc8ea690bdc3b795510005c7abd52f66009b"} Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.249103 4717 scope.go:117] "RemoveContainer" containerID="f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.251322 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/2.log" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.254105 4717 generic.go:334] "Generic (PLEG): container finished" podID="b6f07bec-6fc6-4a37-b5af-843cd773809f" containerID="b6cee1c046f208b15b99deb07efdf89471df2f55aa8bf212bb7d83ff019f026c" exitCode=0 Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.254164 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerDied","Data":"b6cee1c046f208b15b99deb07efdf89471df2f55aa8bf212bb7d83ff019f026c"} Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.254203 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"3d3496e64f68ef518a23b0cc9f96629e2ef9a11b1bf710c480add7683eb89fa9"} Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.275815 4717 scope.go:117] "RemoveContainer" containerID="99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.303573 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l7nn7"] Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.316904 4717 scope.go:117] "RemoveContainer" containerID="65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.320208 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l7nn7"] Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.343779 4717 scope.go:117] "RemoveContainer" containerID="e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.360804 4717 scope.go:117] "RemoveContainer" containerID="1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.375953 4717 scope.go:117] "RemoveContainer" containerID="c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f" Oct 02 
14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.388991 4717 scope.go:117] "RemoveContainer" containerID="9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.401329 4717 scope.go:117] "RemoveContainer" containerID="7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.426763 4717 scope.go:117] "RemoveContainer" containerID="527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.459736 4717 scope.go:117] "RemoveContainer" containerID="f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46" Oct 02 14:31:06 crc kubenswrapper[4717]: E1002 14:31:06.460472 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46\": container with ID starting with f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46 not found: ID does not exist" containerID="f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.460525 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46"} err="failed to get container status \"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46\": rpc error: code = NotFound desc = could not find container \"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46\": container with ID starting with f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46 not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.460558 4717 scope.go:117] "RemoveContainer" containerID="99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e" Oct 02 14:31:06 crc kubenswrapper[4717]: E1002 14:31:06.461002 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\": container with ID starting with 99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e not found: ID does not exist" containerID="99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.461045 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e"} err="failed to get container status \"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\": rpc error: code = NotFound desc = could not find container \"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\": container with ID starting with 99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.461076 4717 scope.go:117] "RemoveContainer" containerID="65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733" Oct 02 14:31:06 crc kubenswrapper[4717]: E1002 14:31:06.461334 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\": container with ID starting with 65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733 not found: ID 
does not exist" containerID="65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.461375 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733"} err="failed to get container status \"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\": rpc error: code = NotFound desc = could not find container \"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\": container with ID starting with 65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733 not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.461396 4717 scope.go:117] "RemoveContainer" containerID="e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d" Oct 02 14:31:06 crc kubenswrapper[4717]: E1002 14:31:06.461707 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\": container with ID starting with e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d not found: ID does not exist" containerID="e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.461753 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d"} err="failed to get container status \"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\": rpc error: code = NotFound desc = could not find container \"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\": container with ID starting with e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.461783 4717 scope.go:117] "RemoveContainer" containerID="1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f" Oct 02 14:31:06 crc kubenswrapper[4717]: E1002 14:31:06.462139 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\": container with ID starting with 1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f not found: ID does not exist" containerID="1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.462167 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f"} err="failed to get container status \"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\": rpc error: code = NotFound desc = could not find container \"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\": container with ID starting with 1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.462182 4717 scope.go:117] "RemoveContainer" containerID="c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f" Oct 02 14:31:06 crc kubenswrapper[4717]: E1002 14:31:06.462413 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\": container with ID starting with c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f not found: ID does not exist" containerID="c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.462444 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f"} err="failed to get container status \"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\": rpc error: code = NotFound desc = could not find container \"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\": container with ID starting with c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.462465 4717 scope.go:117] "RemoveContainer" containerID="9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f" Oct 02 14:31:06 crc kubenswrapper[4717]: E1002 14:31:06.462691 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\": container with ID starting with 9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f not found: ID does not exist" containerID="9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.462712 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f"} err="failed to get container status \"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\": rpc error: code = NotFound desc = could not find container \"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\": container with ID starting with 9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.462725 4717 scope.go:117] "RemoveContainer" containerID="7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec" Oct 02 14:31:06 crc kubenswrapper[4717]: E1002 14:31:06.462945 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\": container with ID starting with 7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec not found: ID does not exist" containerID="7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.462976 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec"} err="failed to get container status \"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\": rpc error: code = NotFound desc = could not find container \"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\": container with ID starting with 7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.462995 4717 scope.go:117] "RemoveContainer" containerID="527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e" Oct 02 14:31:06 crc 
kubenswrapper[4717]: E1002 14:31:06.463241 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\": container with ID starting with 527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e not found: ID does not exist" containerID="527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.463263 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e"} err="failed to get container status \"527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\": rpc error: code = NotFound desc = could not find container \"527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\": container with ID starting with 527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.463276 4717 scope.go:117] "RemoveContainer" containerID="f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.463578 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46"} err="failed to get container status \"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46\": rpc error: code = NotFound desc = could not find container \"f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46\": container with ID starting with f41cf1b91af387016cf248e15e4b55ff2a57c145bee6f7c54d0be21dc93a9b46 not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.463602 4717 scope.go:117] "RemoveContainer" containerID="99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.463801 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e"} err="failed to get container status \"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\": rpc error: code = NotFound desc = could not find container \"99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e\": container with ID starting with 99c278e2534d3582d5b0156d798a9850e1e19bc9ff55a224975c76b0638afd0e not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.463820 4717 scope.go:117] "RemoveContainer" containerID="65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.464125 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733"} err="failed to get container status \"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\": rpc error: code = NotFound desc = could not find container \"65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733\": container with ID starting with 65ffb52d37ac965bf05003c6796b8ec6f77ee5e7df225226256a316c2de54733 not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.464145 4717 scope.go:117] "RemoveContainer" containerID="e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d" Oct 02 14:31:06 crc 
kubenswrapper[4717]: I1002 14:31:06.464329 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d"} err="failed to get container status \"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\": rpc error: code = NotFound desc = could not find container \"e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d\": container with ID starting with e05db21a5de10fd7490ee85b0e479caf2423ab81dcfbde577cb2ceddb995165d not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.464347 4717 scope.go:117] "RemoveContainer" containerID="1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.464594 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f"} err="failed to get container status \"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\": rpc error: code = NotFound desc = could not find container \"1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f\": container with ID starting with 1ef737a54f0121984fbd05c1c2501c57d5faa0cd0010f852a6352c489a06927f not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.464615 4717 scope.go:117] "RemoveContainer" containerID="c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.464861 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f"} err="failed to get container status \"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\": rpc error: code = NotFound desc = could not find container \"c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f\": container with ID starting with c125b3f3115363bbf9c1d3383a716e4bbd4aa1580f056c3ed810507f7ea6031f not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.464886 4717 scope.go:117] "RemoveContainer" containerID="9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.465209 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f"} err="failed to get container status \"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\": rpc error: code = NotFound desc = could not find container \"9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f\": container with ID starting with 9263aab93d7965a60a28cf44442a68d68527d64dff4af8b417b52187c246af0f not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.465229 4717 scope.go:117] "RemoveContainer" containerID="7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.465476 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec"} err="failed to get container status \"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\": rpc error: code = NotFound desc = could not find container \"7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec\": container with ID 
starting with 7a100ef26850da9fd6c884b51eac6a9d32125596166f21b4a29c3204306e71ec not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.465501 4717 scope.go:117] "RemoveContainer" containerID="527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.465740 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e"} err="failed to get container status \"527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\": rpc error: code = NotFound desc = could not find container \"527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e\": container with ID starting with 527209811fa713f6ae3fdbd1b9b84921187092111c063733965e862fa470a10e not found: ID does not exist" Oct 02 14:31:06 crc kubenswrapper[4717]: I1002 14:31:06.846336 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4de64e15-550a-4404-92fc-b355535a4bf2" path="/var/lib/kubelet/pods/4de64e15-550a-4404-92fc-b355535a4bf2/volumes" Oct 02 14:31:07 crc kubenswrapper[4717]: I1002 14:31:07.264335 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"00763217ed71cf497c4dec1c91b881d5e7fedf60845af18823a1297a411ca175"} Oct 02 14:31:07 crc kubenswrapper[4717]: I1002 14:31:07.264375 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"f4199626258c9a2f49aee91f5292849a30dd59d1fbc063316d6fc1bc39928e6d"} Oct 02 14:31:07 crc kubenswrapper[4717]: I1002 14:31:07.264384 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"c7e874c4bfece040188244303de48ae7ee53c598c47f748804489728c8e346b0"} Oct 02 14:31:07 crc kubenswrapper[4717]: I1002 14:31:07.264393 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"6bd9ba48b26b82d14e40414640bdef4b7cf69da7400fc6a9e36e2b93ef54b0bf"} Oct 02 14:31:07 crc kubenswrapper[4717]: I1002 14:31:07.264401 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"24881cbffd1d1b073968f616c6acf88568b2e8f30d5e412b7a3881c99f2b1b64"} Oct 02 14:31:07 crc kubenswrapper[4717]: I1002 14:31:07.264409 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"06ec6956262556c6b27714407048d0f9d88dae345f97c256984289f3bfb4a177"} Oct 02 14:31:09 crc kubenswrapper[4717]: I1002 14:31:09.284467 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"c19e7c58e3daf1595ea2265193a7aff83a712cd19a537b5ab37b36f29ef0efca"} Oct 02 14:31:12 crc kubenswrapper[4717]: I1002 14:31:12.302577 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" 
event={"ID":"b6f07bec-6fc6-4a37-b5af-843cd773809f","Type":"ContainerStarted","Data":"c5176fe3ef79e84d2f683bf5d9190a35d560a2dd1282041721f6bc002a3fa3b0"} Oct 02 14:31:12 crc kubenswrapper[4717]: I1002 14:31:12.304055 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:12 crc kubenswrapper[4717]: I1002 14:31:12.304093 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:12 crc kubenswrapper[4717]: I1002 14:31:12.304154 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:12 crc kubenswrapper[4717]: I1002 14:31:12.329100 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" podStartSLOduration=7.329084964 podStartE2EDuration="7.329084964s" podCreationTimestamp="2025-10-02 14:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:31:12.326111314 +0000 UTC m=+623.177965760" watchObservedRunningTime="2025-10-02 14:31:12.329084964 +0000 UTC m=+623.180939400" Oct 02 14:31:12 crc kubenswrapper[4717]: I1002 14:31:12.334331 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:12 crc kubenswrapper[4717]: I1002 14:31:12.337108 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:15 crc kubenswrapper[4717]: I1002 14:31:15.839001 4717 scope.go:117] "RemoveContainer" containerID="3527acb10ee45ae3438864810d930ced0270423b8149f122db3ebeef60142df5" Oct 02 14:31:15 crc kubenswrapper[4717]: E1002 14:31:15.839631 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-s7n7q_openshift-multus(424c679b-8db0-4ba4-9c8f-67a65fe38048)\"" pod="openshift-multus/multus-s7n7q" podUID="424c679b-8db0-4ba4-9c8f-67a65fe38048" Oct 02 14:31:28 crc kubenswrapper[4717]: I1002 14:31:28.839161 4717 scope.go:117] "RemoveContainer" containerID="3527acb10ee45ae3438864810d930ced0270423b8149f122db3ebeef60142df5" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.399323 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s7n7q_424c679b-8db0-4ba4-9c8f-67a65fe38048/kube-multus/2.log" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.399646 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s7n7q" event={"ID":"424c679b-8db0-4ba4-9c8f-67a65fe38048","Type":"ContainerStarted","Data":"1f3fc001cf4a9f005cc01cd11422c14eaad672925ea4f45f4a46c3ab082ba2ab"} Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.754904 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9"] Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.755795 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.757413 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.767429 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9"] Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.836975 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.837043 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxnvs\" (UniqueName: \"kubernetes.io/projected/8d3a60d1-ca33-4f9e-9499-c7933449692b-kube-api-access-nxnvs\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.837072 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.937797 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.937900 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.937964 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxnvs\" (UniqueName: \"kubernetes.io/projected/8d3a60d1-ca33-4f9e-9499-c7933449692b-kube-api-access-nxnvs\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.938577 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.938577 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:29 crc kubenswrapper[4717]: I1002 14:31:29.960585 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxnvs\" (UniqueName: \"kubernetes.io/projected/8d3a60d1-ca33-4f9e-9499-c7933449692b-kube-api-access-nxnvs\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:30 crc kubenswrapper[4717]: I1002 14:31:30.093733 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:30 crc kubenswrapper[4717]: E1002 14:31:30.113636 4717 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace_8d3a60d1-ca33-4f9e-9499-c7933449692b_0(2ef128d158295634ff87c90a19f3ba4c3511d03097b8aa20d996b0e7c56956e4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 14:31:30 crc kubenswrapper[4717]: E1002 14:31:30.113698 4717 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace_8d3a60d1-ca33-4f9e-9499-c7933449692b_0(2ef128d158295634ff87c90a19f3ba4c3511d03097b8aa20d996b0e7c56956e4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:30 crc kubenswrapper[4717]: E1002 14:31:30.113722 4717 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace_8d3a60d1-ca33-4f9e-9499-c7933449692b_0(2ef128d158295634ff87c90a19f3ba4c3511d03097b8aa20d996b0e7c56956e4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:30 crc kubenswrapper[4717]: E1002 14:31:30.113776 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace(8d3a60d1-ca33-4f9e-9499-c7933449692b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace(8d3a60d1-ca33-4f9e-9499-c7933449692b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace_8d3a60d1-ca33-4f9e-9499-c7933449692b_0(2ef128d158295634ff87c90a19f3ba4c3511d03097b8aa20d996b0e7c56956e4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" Oct 02 14:31:30 crc kubenswrapper[4717]: I1002 14:31:30.403801 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:30 crc kubenswrapper[4717]: I1002 14:31:30.404246 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:30 crc kubenswrapper[4717]: E1002 14:31:30.425801 4717 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace_8d3a60d1-ca33-4f9e-9499-c7933449692b_0(53528be5e02000ffbfaafdfca7a536d8c094f7f0ede24f9b56f3c250f2fc9d98): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 02 14:31:30 crc kubenswrapper[4717]: E1002 14:31:30.425876 4717 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace_8d3a60d1-ca33-4f9e-9499-c7933449692b_0(53528be5e02000ffbfaafdfca7a536d8c094f7f0ede24f9b56f3c250f2fc9d98): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:30 crc kubenswrapper[4717]: E1002 14:31:30.425903 4717 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace_8d3a60d1-ca33-4f9e-9499-c7933449692b_0(53528be5e02000ffbfaafdfca7a536d8c094f7f0ede24f9b56f3c250f2fc9d98): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:30 crc kubenswrapper[4717]: E1002 14:31:30.425980 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace(8d3a60d1-ca33-4f9e-9499-c7933449692b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace(8d3a60d1-ca33-4f9e-9499-c7933449692b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_openshift-marketplace_8d3a60d1-ca33-4f9e-9499-c7933449692b_0(53528be5e02000ffbfaafdfca7a536d8c094f7f0ede24f9b56f3c250f2fc9d98): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" Oct 02 14:31:35 crc kubenswrapper[4717]: I1002 14:31:35.698764 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nrrgp" Oct 02 14:31:41 crc kubenswrapper[4717]: I1002 14:31:41.838027 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:41 crc kubenswrapper[4717]: I1002 14:31:41.838711 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:42 crc kubenswrapper[4717]: I1002 14:31:42.019642 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9"] Oct 02 14:31:42 crc kubenswrapper[4717]: I1002 14:31:42.471048 4717 generic.go:334] "Generic (PLEG): container finished" podID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerID="c25f9bb706a2740e146e6ab5bdc592d497ebdcf3b7ff0eb92ebbf183725242f7" exitCode=0 Oct 02 14:31:42 crc kubenswrapper[4717]: I1002 14:31:42.471191 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" event={"ID":"8d3a60d1-ca33-4f9e-9499-c7933449692b","Type":"ContainerDied","Data":"c25f9bb706a2740e146e6ab5bdc592d497ebdcf3b7ff0eb92ebbf183725242f7"} Oct 02 14:31:42 crc kubenswrapper[4717]: I1002 14:31:42.471457 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" event={"ID":"8d3a60d1-ca33-4f9e-9499-c7933449692b","Type":"ContainerStarted","Data":"95c82873ca04d84ccbb286689cd21390744c05b19db637bb37c83758219df02b"} Oct 02 14:31:42 crc kubenswrapper[4717]: I1002 14:31:42.473180 4717 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 14:31:44 crc kubenswrapper[4717]: I1002 14:31:44.485551 4717 generic.go:334] "Generic (PLEG): container finished" podID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerID="e6193623dc3a85fb0a5d9858d05aaba83c7a69008b5b1e0f807456c79e20fb0a" exitCode=0 Oct 02 14:31:44 crc kubenswrapper[4717]: I1002 14:31:44.485694 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" 
event={"ID":"8d3a60d1-ca33-4f9e-9499-c7933449692b","Type":"ContainerDied","Data":"e6193623dc3a85fb0a5d9858d05aaba83c7a69008b5b1e0f807456c79e20fb0a"} Oct 02 14:31:45 crc kubenswrapper[4717]: I1002 14:31:45.499962 4717 generic.go:334] "Generic (PLEG): container finished" podID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerID="612346ec7d03d17aa4f6fe1e343d0c491b24a1a5d5f0b4119a75f22a1166a6f4" exitCode=0 Oct 02 14:31:45 crc kubenswrapper[4717]: I1002 14:31:45.500027 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" event={"ID":"8d3a60d1-ca33-4f9e-9499-c7933449692b","Type":"ContainerDied","Data":"612346ec7d03d17aa4f6fe1e343d0c491b24a1a5d5f0b4119a75f22a1166a6f4"} Oct 02 14:31:46 crc kubenswrapper[4717]: I1002 14:31:46.804756 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:46 crc kubenswrapper[4717]: I1002 14:31:46.958871 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-util\") pod \"8d3a60d1-ca33-4f9e-9499-c7933449692b\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " Oct 02 14:31:46 crc kubenswrapper[4717]: I1002 14:31:46.958918 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxnvs\" (UniqueName: \"kubernetes.io/projected/8d3a60d1-ca33-4f9e-9499-c7933449692b-kube-api-access-nxnvs\") pod \"8d3a60d1-ca33-4f9e-9499-c7933449692b\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " Oct 02 14:31:46 crc kubenswrapper[4717]: I1002 14:31:46.959022 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-bundle\") pod \"8d3a60d1-ca33-4f9e-9499-c7933449692b\" (UID: \"8d3a60d1-ca33-4f9e-9499-c7933449692b\") " Oct 02 14:31:46 crc kubenswrapper[4717]: I1002 14:31:46.961066 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-bundle" (OuterVolumeSpecName: "bundle") pod "8d3a60d1-ca33-4f9e-9499-c7933449692b" (UID: "8d3a60d1-ca33-4f9e-9499-c7933449692b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:31:46 crc kubenswrapper[4717]: I1002 14:31:46.965223 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d3a60d1-ca33-4f9e-9499-c7933449692b-kube-api-access-nxnvs" (OuterVolumeSpecName: "kube-api-access-nxnvs") pod "8d3a60d1-ca33-4f9e-9499-c7933449692b" (UID: "8d3a60d1-ca33-4f9e-9499-c7933449692b"). InnerVolumeSpecName "kube-api-access-nxnvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:31:46 crc kubenswrapper[4717]: I1002 14:31:46.974260 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-util" (OuterVolumeSpecName: "util") pod "8d3a60d1-ca33-4f9e-9499-c7933449692b" (UID: "8d3a60d1-ca33-4f9e-9499-c7933449692b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:31:47 crc kubenswrapper[4717]: I1002 14:31:47.061716 4717 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:47 crc kubenswrapper[4717]: I1002 14:31:47.061814 4717 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8d3a60d1-ca33-4f9e-9499-c7933449692b-util\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:47 crc kubenswrapper[4717]: I1002 14:31:47.061834 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxnvs\" (UniqueName: \"kubernetes.io/projected/8d3a60d1-ca33-4f9e-9499-c7933449692b-kube-api-access-nxnvs\") on node \"crc\" DevicePath \"\"" Oct 02 14:31:47 crc kubenswrapper[4717]: I1002 14:31:47.523295 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" event={"ID":"8d3a60d1-ca33-4f9e-9499-c7933449692b","Type":"ContainerDied","Data":"95c82873ca04d84ccbb286689cd21390744c05b19db637bb37c83758219df02b"} Oct 02 14:31:47 crc kubenswrapper[4717]: I1002 14:31:47.523360 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9" Oct 02 14:31:47 crc kubenswrapper[4717]: I1002 14:31:47.523396 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95c82873ca04d84ccbb286689cd21390744c05b19db637bb37c83758219df02b" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.932575 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk"] Oct 02 14:31:57 crc kubenswrapper[4717]: E1002 14:31:57.933424 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerName="pull" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.933439 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerName="pull" Oct 02 14:31:57 crc kubenswrapper[4717]: E1002 14:31:57.933450 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerName="extract" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.933458 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerName="extract" Oct 02 14:31:57 crc kubenswrapper[4717]: E1002 14:31:57.933471 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerName="util" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.933479 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerName="util" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.933591 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d3a60d1-ca33-4f9e-9499-c7933449692b" containerName="extract" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.934058 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.936874 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.936883 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.937321 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.937441 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-85w92" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.937482 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 02 14:31:57 crc kubenswrapper[4717]: I1002 14:31:57.950236 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk"] Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.087289 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ab57e351-5255-44f7-a345-ce572861d96c-apiservice-cert\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.087346 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfrmx\" (UniqueName: \"kubernetes.io/projected/ab57e351-5255-44f7-a345-ce572861d96c-kube-api-access-qfrmx\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.087421 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ab57e351-5255-44f7-a345-ce572861d96c-webhook-cert\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.188613 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ab57e351-5255-44f7-a345-ce572861d96c-apiservice-cert\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.189966 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfrmx\" (UniqueName: \"kubernetes.io/projected/ab57e351-5255-44f7-a345-ce572861d96c-kube-api-access-qfrmx\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.190118 
4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ab57e351-5255-44f7-a345-ce572861d96c-webhook-cert\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.197500 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ab57e351-5255-44f7-a345-ce572861d96c-apiservice-cert\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.197511 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ab57e351-5255-44f7-a345-ce572861d96c-webhook-cert\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.211055 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfrmx\" (UniqueName: \"kubernetes.io/projected/ab57e351-5255-44f7-a345-ce572861d96c-kube-api-access-qfrmx\") pod \"metallb-operator-controller-manager-7bfd4cf5df-b9zdk\" (UID: \"ab57e351-5255-44f7-a345-ce572861d96c\") " pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.247972 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.270093 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv"] Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.270698 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.276157 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.276434 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.276710 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-bdq44" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.296843 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv"] Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.391984 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cca01e6e-5d15-486b-8c28-3f21c54fa045-apiservice-cert\") pod \"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.392127 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cca01e6e-5d15-486b-8c28-3f21c54fa045-webhook-cert\") pod \"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.392279 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xv2f2\" (UniqueName: \"kubernetes.io/projected/cca01e6e-5d15-486b-8c28-3f21c54fa045-kube-api-access-xv2f2\") pod \"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.488339 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk"] Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.496129 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xv2f2\" (UniqueName: \"kubernetes.io/projected/cca01e6e-5d15-486b-8c28-3f21c54fa045-kube-api-access-xv2f2\") pod \"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.496382 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cca01e6e-5d15-486b-8c28-3f21c54fa045-apiservice-cert\") pod \"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.496448 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cca01e6e-5d15-486b-8c28-3f21c54fa045-webhook-cert\") pod 
\"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.500967 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cca01e6e-5d15-486b-8c28-3f21c54fa045-webhook-cert\") pod \"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.501346 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cca01e6e-5d15-486b-8c28-3f21c54fa045-apiservice-cert\") pod \"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.512878 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xv2f2\" (UniqueName: \"kubernetes.io/projected/cca01e6e-5d15-486b-8c28-3f21c54fa045-kube-api-access-xv2f2\") pod \"metallb-operator-webhook-server-5d64c8548f-x44sv\" (UID: \"cca01e6e-5d15-486b-8c28-3f21c54fa045\") " pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.575334 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" event={"ID":"ab57e351-5255-44f7-a345-ce572861d96c","Type":"ContainerStarted","Data":"76a36eb126eac4aefa7224ae91156d5b06ba8777b0884a010e620c0476cfa95a"} Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.617599 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:31:58 crc kubenswrapper[4717]: I1002 14:31:58.790840 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv"] Oct 02 14:31:58 crc kubenswrapper[4717]: W1002 14:31:58.805266 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcca01e6e_5d15_486b_8c28_3f21c54fa045.slice/crio-716e70b557d302b0b658ead3096d2f65fe0b5bc5ddffd12a7b5896f943a79af1 WatchSource:0}: Error finding container 716e70b557d302b0b658ead3096d2f65fe0b5bc5ddffd12a7b5896f943a79af1: Status 404 returned error can't find the container with id 716e70b557d302b0b658ead3096d2f65fe0b5bc5ddffd12a7b5896f943a79af1 Oct 02 14:31:59 crc kubenswrapper[4717]: I1002 14:31:59.582581 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" event={"ID":"cca01e6e-5d15-486b-8c28-3f21c54fa045","Type":"ContainerStarted","Data":"716e70b557d302b0b658ead3096d2f65fe0b5bc5ddffd12a7b5896f943a79af1"} Oct 02 14:32:02 crc kubenswrapper[4717]: I1002 14:32:02.608806 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" event={"ID":"ab57e351-5255-44f7-a345-ce572861d96c","Type":"ContainerStarted","Data":"d1a97848dffb342f5fed987afe0b89413584bd30b3cca359544aa970406d7788"} Oct 02 14:32:02 crc kubenswrapper[4717]: I1002 14:32:02.610046 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:32:03 crc kubenswrapper[4717]: I1002 14:32:03.615698 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" event={"ID":"cca01e6e-5d15-486b-8c28-3f21c54fa045","Type":"ContainerStarted","Data":"c2159a017155b70ea50dfcc581691343f5ab48bd93e8e3182cbb022e1cf564d2"} Oct 02 14:32:03 crc kubenswrapper[4717]: I1002 14:32:03.638719 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" podStartSLOduration=1.231365055 podStartE2EDuration="5.638703497s" podCreationTimestamp="2025-10-02 14:31:58 +0000 UTC" firstStartedPulling="2025-10-02 14:31:58.811139599 +0000 UTC m=+669.662994045" lastFinishedPulling="2025-10-02 14:32:03.218478041 +0000 UTC m=+674.070332487" observedRunningTime="2025-10-02 14:32:03.636545208 +0000 UTC m=+674.488399664" watchObservedRunningTime="2025-10-02 14:32:03.638703497 +0000 UTC m=+674.490557943" Oct 02 14:32:03 crc kubenswrapper[4717]: I1002 14:32:03.639297 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" podStartSLOduration=3.40670012 podStartE2EDuration="6.639293542s" podCreationTimestamp="2025-10-02 14:31:57 +0000 UTC" firstStartedPulling="2025-10-02 14:31:58.509687484 +0000 UTC m=+669.361541930" lastFinishedPulling="2025-10-02 14:32:01.742280916 +0000 UTC m=+672.594135352" observedRunningTime="2025-10-02 14:32:02.644320657 +0000 UTC m=+673.496175103" watchObservedRunningTime="2025-10-02 14:32:03.639293542 +0000 UTC m=+674.491147988" Oct 02 14:32:04 crc kubenswrapper[4717]: I1002 14:32:04.619322 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" 
Oct 02 14:32:18 crc kubenswrapper[4717]: I1002 14:32:18.624038 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5d64c8548f-x44sv" Oct 02 14:32:38 crc kubenswrapper[4717]: I1002 14:32:38.251083 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7bfd4cf5df-b9zdk" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.116607 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8"] Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.117464 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.121336 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-bhhnp" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.121502 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.126639 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-ql5c8"] Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.128781 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.130392 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.130617 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.133705 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8"] Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.199548 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-dwj7m"] Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.200335 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.202125 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.202312 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.202545 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.202618 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-fztjb" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.224869 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-jj4xv"] Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.225808 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227399 4717 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227463 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0d083165-db97-4223-a0ea-808e1d3501de-cert\") pod \"frr-k8s-webhook-server-64bf5d555-29hc8\" (UID: \"0d083165-db97-4223-a0ea-808e1d3501de\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227549 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-sockets\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227571 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78pcz\" (UniqueName: \"kubernetes.io/projected/0d083165-db97-4223-a0ea-808e1d3501de-kube-api-access-78pcz\") pod \"frr-k8s-webhook-server-64bf5d555-29hc8\" (UID: \"0d083165-db97-4223-a0ea-808e1d3501de\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227692 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-startup\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227721 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdxkn\" (UniqueName: \"kubernetes.io/projected/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-kube-api-access-hdxkn\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227750 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-conf\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227961 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-reloader\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.227994 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-metrics-certs\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.228079 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" 
(UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-metrics\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.238193 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-jj4xv"] Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.328966 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdxkn\" (UniqueName: \"kubernetes.io/projected/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-kube-api-access-hdxkn\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329016 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/db72cefb-692a-456b-8326-073a98c6f7a7-metallb-excludel2\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329046 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-conf\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329084 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh2vk\" (UniqueName: \"kubernetes.io/projected/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-kube-api-access-rh2vk\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329149 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-metrics-certs\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329181 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-reloader\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329198 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-metrics-certs\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329219 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h2rr\" (UniqueName: \"kubernetes.io/projected/db72cefb-692a-456b-8326-073a98c6f7a7-kube-api-access-5h2rr\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329253 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-metrics-certs\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329268 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-metrics\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329301 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0d083165-db97-4223-a0ea-808e1d3501de-cert\") pod \"frr-k8s-webhook-server-64bf5d555-29hc8\" (UID: \"0d083165-db97-4223-a0ea-808e1d3501de\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329324 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-sockets\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329344 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78pcz\" (UniqueName: \"kubernetes.io/projected/0d083165-db97-4223-a0ea-808e1d3501de-kube-api-access-78pcz\") pod \"frr-k8s-webhook-server-64bf5d555-29hc8\" (UID: \"0d083165-db97-4223-a0ea-808e1d3501de\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329366 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-cert\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329381 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-startup\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329397 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329554 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-conf\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329749 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-reloader\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " 
pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.329826 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-metrics\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.330399 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-startup\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.330509 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-frr-sockets\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.341965 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-metrics-certs\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.341997 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0d083165-db97-4223-a0ea-808e1d3501de-cert\") pod \"frr-k8s-webhook-server-64bf5d555-29hc8\" (UID: \"0d083165-db97-4223-a0ea-808e1d3501de\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.349645 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78pcz\" (UniqueName: \"kubernetes.io/projected/0d083165-db97-4223-a0ea-808e1d3501de-kube-api-access-78pcz\") pod \"frr-k8s-webhook-server-64bf5d555-29hc8\" (UID: \"0d083165-db97-4223-a0ea-808e1d3501de\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.352850 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdxkn\" (UniqueName: \"kubernetes.io/projected/cfa1a1ce-0405-488f-91e9-f12ccfd2636c-kube-api-access-hdxkn\") pod \"frr-k8s-ql5c8\" (UID: \"cfa1a1ce-0405-488f-91e9-f12ccfd2636c\") " pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.430671 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-metrics-certs\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.430737 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.430754 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-cert\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.430776 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/db72cefb-692a-456b-8326-073a98c6f7a7-metallb-excludel2\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.430832 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh2vk\" (UniqueName: \"kubernetes.io/projected/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-kube-api-access-rh2vk\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.430854 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-metrics-certs\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.430874 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h2rr\" (UniqueName: \"kubernetes.io/projected/db72cefb-692a-456b-8326-073a98c6f7a7-kube-api-access-5h2rr\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: E1002 14:32:39.430946 4717 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 02 14:32:39 crc kubenswrapper[4717]: E1002 14:32:39.431033 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist podName:db72cefb-692a-456b-8326-073a98c6f7a7 nodeName:}" failed. No retries permitted until 2025-10-02 14:32:39.931010319 +0000 UTC m=+710.782864825 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist") pod "speaker-dwj7m" (UID: "db72cefb-692a-456b-8326-073a98c6f7a7") : secret "metallb-memberlist" not found Oct 02 14:32:39 crc kubenswrapper[4717]: E1002 14:32:39.431349 4717 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Oct 02 14:32:39 crc kubenswrapper[4717]: E1002 14:32:39.431411 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-metrics-certs podName:a23b49f3-2a36-424a-a9d3-7f3a2868ede2 nodeName:}" failed. No retries permitted until 2025-10-02 14:32:39.93139568 +0000 UTC m=+710.783250216 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-metrics-certs") pod "controller-68d546b9d8-jj4xv" (UID: "a23b49f3-2a36-424a-a9d3-7f3a2868ede2") : secret "controller-certs-secret" not found Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.431469 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/db72cefb-692a-456b-8326-073a98c6f7a7-metallb-excludel2\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.434150 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-metrics-certs\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.434554 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-cert\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.447315 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h2rr\" (UniqueName: \"kubernetes.io/projected/db72cefb-692a-456b-8326-073a98c6f7a7-kube-api-access-5h2rr\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.450161 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.455570 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh2vk\" (UniqueName: \"kubernetes.io/projected/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-kube-api-access-rh2vk\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.458699 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.816057 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerStarted","Data":"cc30fbc618901e3e06a0121bad0bd069bb9e39dcbd24aa5011b48f61ed5ff767"} Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.864387 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8"] Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.936979 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.937167 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-metrics-certs\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:39 crc kubenswrapper[4717]: E1002 14:32:39.937178 4717 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 02 14:32:39 crc kubenswrapper[4717]: E1002 14:32:39.937323 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist podName:db72cefb-692a-456b-8326-073a98c6f7a7 nodeName:}" failed. No retries permitted until 2025-10-02 14:32:40.937299955 +0000 UTC m=+711.789154431 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist") pod "speaker-dwj7m" (UID: "db72cefb-692a-456b-8326-073a98c6f7a7") : secret "metallb-memberlist" not found Oct 02 14:32:39 crc kubenswrapper[4717]: I1002 14:32:39.944290 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a23b49f3-2a36-424a-a9d3-7f3a2868ede2-metrics-certs\") pod \"controller-68d546b9d8-jj4xv\" (UID: \"a23b49f3-2a36-424a-a9d3-7f3a2868ede2\") " pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:40 crc kubenswrapper[4717]: I1002 14:32:40.138748 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:40 crc kubenswrapper[4717]: I1002 14:32:40.575598 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-jj4xv"] Oct 02 14:32:40 crc kubenswrapper[4717]: I1002 14:32:40.821436 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" event={"ID":"0d083165-db97-4223-a0ea-808e1d3501de","Type":"ContainerStarted","Data":"5b6d8eb2d32baa9be57bdcd30445dbd6567d1877d46b940fc138bf19657ce525"} Oct 02 14:32:40 crc kubenswrapper[4717]: I1002 14:32:40.822617 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-jj4xv" event={"ID":"a23b49f3-2a36-424a-a9d3-7f3a2868ede2","Type":"ContainerStarted","Data":"b92b2dcf470744acc7a7e4b29daa17c579ad7d67e7da1b6ed411e83f65dd6487"} Oct 02 14:32:40 crc kubenswrapper[4717]: I1002 14:32:40.822642 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-jj4xv" event={"ID":"a23b49f3-2a36-424a-a9d3-7f3a2868ede2","Type":"ContainerStarted","Data":"ebb57000ec733d3cdc33827b0de664372276e4a9e4cf3283bc63ffb17ed12643"} Oct 02 14:32:40 crc kubenswrapper[4717]: I1002 14:32:40.955503 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:40 crc kubenswrapper[4717]: I1002 14:32:40.961411 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/db72cefb-692a-456b-8326-073a98c6f7a7-memberlist\") pod \"speaker-dwj7m\" (UID: \"db72cefb-692a-456b-8326-073a98c6f7a7\") " pod="metallb-system/speaker-dwj7m" Oct 02 14:32:41 crc kubenswrapper[4717]: I1002 14:32:41.014121 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-dwj7m" Oct 02 14:32:41 crc kubenswrapper[4717]: I1002 14:32:41.838494 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dwj7m" event={"ID":"db72cefb-692a-456b-8326-073a98c6f7a7","Type":"ContainerStarted","Data":"17084b17b9ea20eae3f32306e5969d42c7ea82bf4dfcfe925f0fd479bfc4a529"} Oct 02 14:32:41 crc kubenswrapper[4717]: I1002 14:32:41.838833 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dwj7m" event={"ID":"db72cefb-692a-456b-8326-073a98c6f7a7","Type":"ContainerStarted","Data":"334ae3e927f04fe9f9fe75bb8738fbd8420208570f25406e83571a32fef4244f"} Oct 02 14:32:44 crc kubenswrapper[4717]: I1002 14:32:44.874451 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-jj4xv" event={"ID":"a23b49f3-2a36-424a-a9d3-7f3a2868ede2","Type":"ContainerStarted","Data":"8a9df9637658259b21a2f1a0dc43a4a2fa1aabc3252efecb0cf3bf15b2afda8a"} Oct 02 14:32:44 crc kubenswrapper[4717]: I1002 14:32:44.874779 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:44 crc kubenswrapper[4717]: I1002 14:32:44.878091 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-dwj7m" event={"ID":"db72cefb-692a-456b-8326-073a98c6f7a7","Type":"ContainerStarted","Data":"f18d05988e995b7d7caed3b18f5b342de237b8be29fb368bbc3a9940a20b242d"} Oct 02 14:32:44 crc kubenswrapper[4717]: I1002 14:32:44.878545 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-dwj7m" Oct 02 14:32:44 crc kubenswrapper[4717]: I1002 14:32:44.897135 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-jj4xv" podStartSLOduration=3.013006202 podStartE2EDuration="5.897118486s" podCreationTimestamp="2025-10-02 14:32:39 +0000 UTC" firstStartedPulling="2025-10-02 14:32:40.824337941 +0000 UTC m=+711.676192387" lastFinishedPulling="2025-10-02 14:32:43.708450225 +0000 UTC m=+714.560304671" observedRunningTime="2025-10-02 14:32:44.895336357 +0000 UTC m=+715.747190803" watchObservedRunningTime="2025-10-02 14:32:44.897118486 +0000 UTC m=+715.748972932" Oct 02 14:32:44 crc kubenswrapper[4717]: I1002 14:32:44.913812 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-dwj7m" podStartSLOduration=3.441863059 podStartE2EDuration="5.913793663s" podCreationTimestamp="2025-10-02 14:32:39 +0000 UTC" firstStartedPulling="2025-10-02 14:32:41.3036615 +0000 UTC m=+712.155515966" lastFinishedPulling="2025-10-02 14:32:43.775592124 +0000 UTC m=+714.627446570" observedRunningTime="2025-10-02 14:32:44.910211605 +0000 UTC m=+715.762066051" watchObservedRunningTime="2025-10-02 14:32:44.913793663 +0000 UTC m=+715.765648109" Oct 02 14:32:46 crc kubenswrapper[4717]: I1002 14:32:46.892677 4717 generic.go:334] "Generic (PLEG): container finished" podID="cfa1a1ce-0405-488f-91e9-f12ccfd2636c" containerID="08aa86ea12679158f9b03eb177cdd54afd97b8eac5a6f6779942a8c2e83b513f" exitCode=0 Oct 02 14:32:46 crc kubenswrapper[4717]: I1002 14:32:46.892801 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerDied","Data":"08aa86ea12679158f9b03eb177cdd54afd97b8eac5a6f6779942a8c2e83b513f"} Oct 02 14:32:46 crc kubenswrapper[4717]: I1002 14:32:46.896177 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" event={"ID":"0d083165-db97-4223-a0ea-808e1d3501de","Type":"ContainerStarted","Data":"40bbcee1a7c78195fafb4e43fe89f67a8db3ec7434c132d5624fd3e408dfe26c"} Oct 02 14:32:46 crc kubenswrapper[4717]: I1002 14:32:46.896439 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:46 crc kubenswrapper[4717]: I1002 14:32:46.959676 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" podStartSLOduration=1.5494606480000002 podStartE2EDuration="7.959651442s" podCreationTimestamp="2025-10-02 14:32:39 +0000 UTC" firstStartedPulling="2025-10-02 14:32:39.871381848 +0000 UTC m=+710.723236334" lastFinishedPulling="2025-10-02 14:32:46.281572682 +0000 UTC m=+717.133427128" observedRunningTime="2025-10-02 14:32:46.952794514 +0000 UTC m=+717.804648990" watchObservedRunningTime="2025-10-02 14:32:46.959651442 +0000 UTC m=+717.811505928" Oct 02 14:32:47 crc kubenswrapper[4717]: I1002 14:32:47.905798 4717 generic.go:334] "Generic (PLEG): container finished" podID="cfa1a1ce-0405-488f-91e9-f12ccfd2636c" containerID="16060e8437ca5e51d39e9c2645b7de7ca786f2fa131a57ba832ec3326ca4f519" exitCode=0 Oct 02 14:32:47 crc kubenswrapper[4717]: I1002 14:32:47.905898 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerDied","Data":"16060e8437ca5e51d39e9c2645b7de7ca786f2fa131a57ba832ec3326ca4f519"} Oct 02 14:32:48 crc kubenswrapper[4717]: I1002 14:32:48.620559 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:32:48 crc kubenswrapper[4717]: I1002 14:32:48.620951 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:32:48 crc kubenswrapper[4717]: I1002 14:32:48.913728 4717 generic.go:334] "Generic (PLEG): container finished" podID="cfa1a1ce-0405-488f-91e9-f12ccfd2636c" containerID="a3f447ac8c392a43f4793bf89de45e5fcd201cc278ede571e9da1a0b51a1efd6" exitCode=0 Oct 02 14:32:48 crc kubenswrapper[4717]: I1002 14:32:48.913773 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerDied","Data":"a3f447ac8c392a43f4793bf89de45e5fcd201cc278ede571e9da1a0b51a1efd6"} Oct 02 14:32:49 crc kubenswrapper[4717]: I1002 14:32:49.923831 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerStarted","Data":"a1a0201288c2edad4dac31c7e1f0ab254d534b6e4e43e388e57aef5dd042e4eb"} Oct 02 14:32:49 crc kubenswrapper[4717]: I1002 14:32:49.924566 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerStarted","Data":"b0b1fce17ef13f6736bab49ffb2c76ee9ff628fd70d0539ec75dde063903b818"} Oct 02 14:32:49 crc 
kubenswrapper[4717]: I1002 14:32:49.924638 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerStarted","Data":"8ea8ad83caf9d521ce4959572f4e5c532abe0d9cb2cc41b92f697c61725618a0"} Oct 02 14:32:49 crc kubenswrapper[4717]: I1002 14:32:49.924698 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerStarted","Data":"28ffdf9e0f4de48f75c952f63ba3fcc2d44f8636c2be00b6d351b208ab7a8c95"} Oct 02 14:32:49 crc kubenswrapper[4717]: I1002 14:32:49.924758 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerStarted","Data":"c1c221d1afaf19903b4c4fb0be8482c65963cf8ef0a9228bd386b9430f288807"} Oct 02 14:32:50 crc kubenswrapper[4717]: I1002 14:32:50.144976 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-jj4xv" Oct 02 14:32:50 crc kubenswrapper[4717]: I1002 14:32:50.933709 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-ql5c8" event={"ID":"cfa1a1ce-0405-488f-91e9-f12ccfd2636c","Type":"ContainerStarted","Data":"591c64a8986f448f365d2b5c995f4a56572f18b66a880acd858be7fb44882af5"} Oct 02 14:32:50 crc kubenswrapper[4717]: I1002 14:32:50.956165 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-ql5c8" podStartSLOduration=5.290722962 podStartE2EDuration="11.956147293s" podCreationTimestamp="2025-10-02 14:32:39 +0000 UTC" firstStartedPulling="2025-10-02 14:32:39.590223081 +0000 UTC m=+710.442077527" lastFinishedPulling="2025-10-02 14:32:46.255647412 +0000 UTC m=+717.107501858" observedRunningTime="2025-10-02 14:32:50.953777808 +0000 UTC m=+721.805632254" watchObservedRunningTime="2025-10-02 14:32:50.956147293 +0000 UTC m=+721.808001739" Oct 02 14:32:51 crc kubenswrapper[4717]: I1002 14:32:51.019320 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-dwj7m" Oct 02 14:32:51 crc kubenswrapper[4717]: I1002 14:32:51.942443 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:54 crc kubenswrapper[4717]: I1002 14:32:54.459718 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:54 crc kubenswrapper[4717]: I1002 14:32:54.507146 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:57 crc kubenswrapper[4717]: I1002 14:32:57.954864 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-79d4c"] Oct 02 14:32:57 crc kubenswrapper[4717]: I1002 14:32:57.956067 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-79d4c" Oct 02 14:32:57 crc kubenswrapper[4717]: I1002 14:32:57.959071 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 02 14:32:57 crc kubenswrapper[4717]: I1002 14:32:57.959456 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 02 14:32:57 crc kubenswrapper[4717]: I1002 14:32:57.960302 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-lb68b" Oct 02 14:32:57 crc kubenswrapper[4717]: I1002 14:32:57.966742 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-79d4c"] Oct 02 14:32:58 crc kubenswrapper[4717]: I1002 14:32:58.080375 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwspz\" (UniqueName: \"kubernetes.io/projected/a2b9f945-cbd8-434c-a10f-31bd9d1840a8-kube-api-access-gwspz\") pod \"mariadb-operator-index-79d4c\" (UID: \"a2b9f945-cbd8-434c-a10f-31bd9d1840a8\") " pod="openstack-operators/mariadb-operator-index-79d4c" Oct 02 14:32:58 crc kubenswrapper[4717]: I1002 14:32:58.183042 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwspz\" (UniqueName: \"kubernetes.io/projected/a2b9f945-cbd8-434c-a10f-31bd9d1840a8-kube-api-access-gwspz\") pod \"mariadb-operator-index-79d4c\" (UID: \"a2b9f945-cbd8-434c-a10f-31bd9d1840a8\") " pod="openstack-operators/mariadb-operator-index-79d4c" Oct 02 14:32:58 crc kubenswrapper[4717]: I1002 14:32:58.208352 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwspz\" (UniqueName: \"kubernetes.io/projected/a2b9f945-cbd8-434c-a10f-31bd9d1840a8-kube-api-access-gwspz\") pod \"mariadb-operator-index-79d4c\" (UID: \"a2b9f945-cbd8-434c-a10f-31bd9d1840a8\") " pod="openstack-operators/mariadb-operator-index-79d4c" Oct 02 14:32:58 crc kubenswrapper[4717]: I1002 14:32:58.274980 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-79d4c" Oct 02 14:32:58 crc kubenswrapper[4717]: I1002 14:32:58.518456 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-79d4c"] Oct 02 14:32:58 crc kubenswrapper[4717]: I1002 14:32:58.987363 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-79d4c" event={"ID":"a2b9f945-cbd8-434c-a10f-31bd9d1840a8","Type":"ContainerStarted","Data":"4ea663e39c4fae7dc8bc5035246d207165840ea20f41ca91bf86cc872128c523"} Oct 02 14:32:59 crc kubenswrapper[4717]: I1002 14:32:59.465419 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-ql5c8" Oct 02 14:32:59 crc kubenswrapper[4717]: I1002 14:32:59.470949 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-29hc8" Oct 02 14:32:59 crc kubenswrapper[4717]: I1002 14:32:59.995534 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-79d4c" event={"ID":"a2b9f945-cbd8-434c-a10f-31bd9d1840a8","Type":"ContainerStarted","Data":"d26ec9115c741f02720a3e0b44c1a9bc827c413bdf6c82e31e1f586d1b28cde8"} Oct 02 14:33:00 crc kubenswrapper[4717]: I1002 14:33:00.018493 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-79d4c" podStartSLOduration=2.127942402 podStartE2EDuration="3.018459024s" podCreationTimestamp="2025-10-02 14:32:57 +0000 UTC" firstStartedPulling="2025-10-02 14:32:58.529634678 +0000 UTC m=+729.381489124" lastFinishedPulling="2025-10-02 14:32:59.4201513 +0000 UTC m=+730.272005746" observedRunningTime="2025-10-02 14:33:00.011436321 +0000 UTC m=+730.863290807" watchObservedRunningTime="2025-10-02 14:33:00.018459024 +0000 UTC m=+730.870313530" Oct 02 14:33:01 crc kubenswrapper[4717]: I1002 14:33:01.136574 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-79d4c"] Oct 02 14:33:01 crc kubenswrapper[4717]: I1002 14:33:01.742706 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-8wntw"] Oct 02 14:33:01 crc kubenswrapper[4717]: I1002 14:33:01.743886 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:01 crc kubenswrapper[4717]: I1002 14:33:01.753092 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-8wntw"] Oct 02 14:33:01 crc kubenswrapper[4717]: I1002 14:33:01.834223 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2ldn\" (UniqueName: \"kubernetes.io/projected/4db284ef-7569-4a82-b4f5-b49b66745a31-kube-api-access-f2ldn\") pod \"mariadb-operator-index-8wntw\" (UID: \"4db284ef-7569-4a82-b4f5-b49b66745a31\") " pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:01 crc kubenswrapper[4717]: I1002 14:33:01.936233 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2ldn\" (UniqueName: \"kubernetes.io/projected/4db284ef-7569-4a82-b4f5-b49b66745a31-kube-api-access-f2ldn\") pod \"mariadb-operator-index-8wntw\" (UID: \"4db284ef-7569-4a82-b4f5-b49b66745a31\") " pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:02 crc kubenswrapper[4717]: I1002 14:33:02.007178 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-79d4c" podUID="a2b9f945-cbd8-434c-a10f-31bd9d1840a8" containerName="registry-server" containerID="cri-o://d26ec9115c741f02720a3e0b44c1a9bc827c413bdf6c82e31e1f586d1b28cde8" gracePeriod=2 Oct 02 14:33:02 crc kubenswrapper[4717]: I1002 14:33:02.011651 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2ldn\" (UniqueName: \"kubernetes.io/projected/4db284ef-7569-4a82-b4f5-b49b66745a31-kube-api-access-f2ldn\") pod \"mariadb-operator-index-8wntw\" (UID: \"4db284ef-7569-4a82-b4f5-b49b66745a31\") " pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:02 crc kubenswrapper[4717]: I1002 14:33:02.078990 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:02 crc kubenswrapper[4717]: I1002 14:33:02.485970 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-8wntw"] Oct 02 14:33:02 crc kubenswrapper[4717]: W1002 14:33:02.496211 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4db284ef_7569_4a82_b4f5_b49b66745a31.slice/crio-efe0e7280c93973a0ba455a34fd42318a5142a07e7716770f66f9f79fc2fb4b8 WatchSource:0}: Error finding container efe0e7280c93973a0ba455a34fd42318a5142a07e7716770f66f9f79fc2fb4b8: Status 404 returned error can't find the container with id efe0e7280c93973a0ba455a34fd42318a5142a07e7716770f66f9f79fc2fb4b8 Oct 02 14:33:03 crc kubenswrapper[4717]: I1002 14:33:03.014749 4717 generic.go:334] "Generic (PLEG): container finished" podID="a2b9f945-cbd8-434c-a10f-31bd9d1840a8" containerID="d26ec9115c741f02720a3e0b44c1a9bc827c413bdf6c82e31e1f586d1b28cde8" exitCode=0 Oct 02 14:33:03 crc kubenswrapper[4717]: I1002 14:33:03.014824 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-79d4c" event={"ID":"a2b9f945-cbd8-434c-a10f-31bd9d1840a8","Type":"ContainerDied","Data":"d26ec9115c741f02720a3e0b44c1a9bc827c413bdf6c82e31e1f586d1b28cde8"} Oct 02 14:33:03 crc kubenswrapper[4717]: I1002 14:33:03.016352 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-8wntw" event={"ID":"4db284ef-7569-4a82-b4f5-b49b66745a31","Type":"ContainerStarted","Data":"efe0e7280c93973a0ba455a34fd42318a5142a07e7716770f66f9f79fc2fb4b8"} Oct 02 14:33:03 crc kubenswrapper[4717]: I1002 14:33:03.404907 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-79d4c" Oct 02 14:33:03 crc kubenswrapper[4717]: I1002 14:33:03.456059 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwspz\" (UniqueName: \"kubernetes.io/projected/a2b9f945-cbd8-434c-a10f-31bd9d1840a8-kube-api-access-gwspz\") pod \"a2b9f945-cbd8-434c-a10f-31bd9d1840a8\" (UID: \"a2b9f945-cbd8-434c-a10f-31bd9d1840a8\") " Oct 02 14:33:03 crc kubenswrapper[4717]: I1002 14:33:03.467727 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2b9f945-cbd8-434c-a10f-31bd9d1840a8-kube-api-access-gwspz" (OuterVolumeSpecName: "kube-api-access-gwspz") pod "a2b9f945-cbd8-434c-a10f-31bd9d1840a8" (UID: "a2b9f945-cbd8-434c-a10f-31bd9d1840a8"). InnerVolumeSpecName "kube-api-access-gwspz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:33:03 crc kubenswrapper[4717]: I1002 14:33:03.557996 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwspz\" (UniqueName: \"kubernetes.io/projected/a2b9f945-cbd8-434c-a10f-31bd9d1840a8-kube-api-access-gwspz\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:04 crc kubenswrapper[4717]: I1002 14:33:04.022668 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-8wntw" event={"ID":"4db284ef-7569-4a82-b4f5-b49b66745a31","Type":"ContainerStarted","Data":"a402929c69d95b29ec5b9440ec41441206ddfd86cbbcfe06b464234705003f67"} Oct 02 14:33:04 crc kubenswrapper[4717]: I1002 14:33:04.024077 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-79d4c" event={"ID":"a2b9f945-cbd8-434c-a10f-31bd9d1840a8","Type":"ContainerDied","Data":"4ea663e39c4fae7dc8bc5035246d207165840ea20f41ca91bf86cc872128c523"} Oct 02 14:33:04 crc kubenswrapper[4717]: I1002 14:33:04.024130 4717 scope.go:117] "RemoveContainer" containerID="d26ec9115c741f02720a3e0b44c1a9bc827c413bdf6c82e31e1f586d1b28cde8" Oct 02 14:33:04 crc kubenswrapper[4717]: I1002 14:33:04.024363 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-79d4c" Oct 02 14:33:04 crc kubenswrapper[4717]: I1002 14:33:04.042367 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-8wntw" podStartSLOduration=2.243174727 podStartE2EDuration="3.042348045s" podCreationTimestamp="2025-10-02 14:33:01 +0000 UTC" firstStartedPulling="2025-10-02 14:33:02.501600476 +0000 UTC m=+733.353454922" lastFinishedPulling="2025-10-02 14:33:03.300773774 +0000 UTC m=+734.152628240" observedRunningTime="2025-10-02 14:33:04.03668658 +0000 UTC m=+734.888541026" watchObservedRunningTime="2025-10-02 14:33:04.042348045 +0000 UTC m=+734.894202491" Oct 02 14:33:04 crc kubenswrapper[4717]: I1002 14:33:04.062899 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-79d4c"] Oct 02 14:33:04 crc kubenswrapper[4717]: I1002 14:33:04.065735 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-79d4c"] Oct 02 14:33:04 crc kubenswrapper[4717]: I1002 14:33:04.852069 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2b9f945-cbd8-434c-a10f-31bd9d1840a8" path="/var/lib/kubelet/pods/a2b9f945-cbd8-434c-a10f-31bd9d1840a8/volumes" Oct 02 14:33:12 crc kubenswrapper[4717]: I1002 14:33:12.080249 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:12 crc kubenswrapper[4717]: I1002 14:33:12.081444 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:12 crc kubenswrapper[4717]: I1002 14:33:12.130127 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:13 crc kubenswrapper[4717]: I1002 14:33:13.124608 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-8wntw" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.189676 4717 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv"] Oct 02 14:33:18 crc kubenswrapper[4717]: E1002 14:33:18.190759 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b9f945-cbd8-434c-a10f-31bd9d1840a8" containerName="registry-server" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.190789 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b9f945-cbd8-434c-a10f-31bd9d1840a8" containerName="registry-server" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.191069 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2b9f945-cbd8-434c-a10f-31bd9d1840a8" containerName="registry-server" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.192717 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.194878 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dzqss" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.214723 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv"] Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.255454 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlxdt\" (UniqueName: \"kubernetes.io/projected/9e697dbf-f458-4f6f-83af-57ef6086b720-kube-api-access-dlxdt\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.255545 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-bundle\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.255629 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-util\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.358026 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlxdt\" (UniqueName: \"kubernetes.io/projected/9e697dbf-f458-4f6f-83af-57ef6086b720-kube-api-access-dlxdt\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.358119 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-util\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: 
\"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.358163 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-bundle\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.359321 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-bundle\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.359380 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-util\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.390756 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlxdt\" (UniqueName: \"kubernetes.io/projected/9e697dbf-f458-4f6f-83af-57ef6086b720-kube-api-access-dlxdt\") pod \"5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.512339 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.624483 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.624843 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:33:18 crc kubenswrapper[4717]: I1002 14:33:18.726006 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv"] Oct 02 14:33:19 crc kubenswrapper[4717]: I1002 14:33:19.136463 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" event={"ID":"9e697dbf-f458-4f6f-83af-57ef6086b720","Type":"ContainerDied","Data":"26431179056b3b5deb9b48e2a1878d36b9069814fa842201272dbdf4537e6ae5"} Oct 02 14:33:19 crc kubenswrapper[4717]: I1002 14:33:19.136557 4717 generic.go:334] "Generic (PLEG): container finished" podID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerID="26431179056b3b5deb9b48e2a1878d36b9069814fa842201272dbdf4537e6ae5" exitCode=0 Oct 02 14:33:19 crc kubenswrapper[4717]: I1002 14:33:19.136877 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" event={"ID":"9e697dbf-f458-4f6f-83af-57ef6086b720","Type":"ContainerStarted","Data":"cb5a092db79e2c79bb75e3a4166a174ac02aeb31874f9dc924ba8e2db8c4f002"} Oct 02 14:33:19 crc kubenswrapper[4717]: E1002 14:33:19.202616 4717 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e697dbf_f458_4f6f_83af_57ef6086b720.slice/crio-conmon-26431179056b3b5deb9b48e2a1878d36b9069814fa842201272dbdf4537e6ae5.scope\": RecentStats: unable to find data in memory cache]" Oct 02 14:33:22 crc kubenswrapper[4717]: I1002 14:33:22.157465 4717 generic.go:334] "Generic (PLEG): container finished" podID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerID="7c26c88376312874ccd77243da33cf1fe3baea3ff647d213067b4b66284f7f59" exitCode=0 Oct 02 14:33:22 crc kubenswrapper[4717]: I1002 14:33:22.157583 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" event={"ID":"9e697dbf-f458-4f6f-83af-57ef6086b720","Type":"ContainerDied","Data":"7c26c88376312874ccd77243da33cf1fe3baea3ff647d213067b4b66284f7f59"} Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.049632 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gkzjm"] Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.050254 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" podUID="4ee2df25-8a54-4608-b82e-41edda414d2b" containerName="controller-manager" 
containerID="cri-o://191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b" gracePeriod=30 Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.061516 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr"] Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.061766 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" podUID="aed4d244-0337-468e-a9d5-d9b0ea805a41" containerName="route-controller-manager" containerID="cri-o://af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d" gracePeriod=30 Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.172990 4717 generic.go:334] "Generic (PLEG): container finished" podID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerID="eae8bed2eb33d0348a85aa20f210825e849b0106bb5935c36c466feb5c495d23" exitCode=0 Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.173043 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" event={"ID":"9e697dbf-f458-4f6f-83af-57ef6086b720","Type":"ContainerDied","Data":"eae8bed2eb33d0348a85aa20f210825e849b0106bb5935c36c466feb5c495d23"} Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.523563 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.527404 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.645319 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-config\") pod \"aed4d244-0337-468e-a9d5-d9b0ea805a41\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.645386 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-client-ca\") pod \"4ee2df25-8a54-4608-b82e-41edda414d2b\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.645431 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aed4d244-0337-468e-a9d5-d9b0ea805a41-serving-cert\") pod \"aed4d244-0337-468e-a9d5-d9b0ea805a41\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.645519 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ee2df25-8a54-4608-b82e-41edda414d2b-serving-cert\") pod \"4ee2df25-8a54-4608-b82e-41edda414d2b\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.645541 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlpgd\" (UniqueName: \"kubernetes.io/projected/aed4d244-0337-468e-a9d5-d9b0ea805a41-kube-api-access-xlpgd\") pod \"aed4d244-0337-468e-a9d5-d9b0ea805a41\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " Oct 02 14:33:23 crc kubenswrapper[4717]: 
I1002 14:33:23.645560 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-client-ca\") pod \"aed4d244-0337-468e-a9d5-d9b0ea805a41\" (UID: \"aed4d244-0337-468e-a9d5-d9b0ea805a41\") " Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.645597 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5hrq\" (UniqueName: \"kubernetes.io/projected/4ee2df25-8a54-4608-b82e-41edda414d2b-kube-api-access-r5hrq\") pod \"4ee2df25-8a54-4608-b82e-41edda414d2b\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.645621 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-proxy-ca-bundles\") pod \"4ee2df25-8a54-4608-b82e-41edda414d2b\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.645649 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-config\") pod \"4ee2df25-8a54-4608-b82e-41edda414d2b\" (UID: \"4ee2df25-8a54-4608-b82e-41edda414d2b\") " Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.646488 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-config" (OuterVolumeSpecName: "config") pod "aed4d244-0337-468e-a9d5-d9b0ea805a41" (UID: "aed4d244-0337-468e-a9d5-d9b0ea805a41"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.646518 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-client-ca" (OuterVolumeSpecName: "client-ca") pod "4ee2df25-8a54-4608-b82e-41edda414d2b" (UID: "4ee2df25-8a54-4608-b82e-41edda414d2b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.646640 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-config" (OuterVolumeSpecName: "config") pod "4ee2df25-8a54-4608-b82e-41edda414d2b" (UID: "4ee2df25-8a54-4608-b82e-41edda414d2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.647417 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4ee2df25-8a54-4608-b82e-41edda414d2b" (UID: "4ee2df25-8a54-4608-b82e-41edda414d2b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.647726 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-client-ca" (OuterVolumeSpecName: "client-ca") pod "aed4d244-0337-468e-a9d5-d9b0ea805a41" (UID: "aed4d244-0337-468e-a9d5-d9b0ea805a41"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.653090 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ee2df25-8a54-4608-b82e-41edda414d2b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4ee2df25-8a54-4608-b82e-41edda414d2b" (UID: "4ee2df25-8a54-4608-b82e-41edda414d2b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.653110 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aed4d244-0337-468e-a9d5-d9b0ea805a41-kube-api-access-xlpgd" (OuterVolumeSpecName: "kube-api-access-xlpgd") pod "aed4d244-0337-468e-a9d5-d9b0ea805a41" (UID: "aed4d244-0337-468e-a9d5-d9b0ea805a41"). InnerVolumeSpecName "kube-api-access-xlpgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.653105 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed4d244-0337-468e-a9d5-d9b0ea805a41-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "aed4d244-0337-468e-a9d5-d9b0ea805a41" (UID: "aed4d244-0337-468e-a9d5-d9b0ea805a41"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.655770 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ee2df25-8a54-4608-b82e-41edda414d2b-kube-api-access-r5hrq" (OuterVolumeSpecName: "kube-api-access-r5hrq") pod "4ee2df25-8a54-4608-b82e-41edda414d2b" (UID: "4ee2df25-8a54-4608-b82e-41edda414d2b"). InnerVolumeSpecName "kube-api-access-r5hrq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746468 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5hrq\" (UniqueName: \"kubernetes.io/projected/4ee2df25-8a54-4608-b82e-41edda414d2b-kube-api-access-r5hrq\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746500 4717 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746509 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746518 4717 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746525 4717 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4ee2df25-8a54-4608-b82e-41edda414d2b-client-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746533 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aed4d244-0337-468e-a9d5-d9b0ea805a41-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746541 4717 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ee2df25-8a54-4608-b82e-41edda414d2b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746550 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlpgd\" (UniqueName: \"kubernetes.io/projected/aed4d244-0337-468e-a9d5-d9b0ea805a41-kube-api-access-xlpgd\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:23 crc kubenswrapper[4717]: I1002 14:33:23.746558 4717 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aed4d244-0337-468e-a9d5-d9b0ea805a41-client-ca\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.181841 4717 generic.go:334] "Generic (PLEG): container finished" podID="aed4d244-0337-468e-a9d5-d9b0ea805a41" containerID="af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d" exitCode=0 Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.181924 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.181964 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" event={"ID":"aed4d244-0337-468e-a9d5-d9b0ea805a41","Type":"ContainerDied","Data":"af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d"} Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.182112 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr" event={"ID":"aed4d244-0337-468e-a9d5-d9b0ea805a41","Type":"ContainerDied","Data":"4abd6e0e388855b520b122119b68e8380b16a9655f4632e30a65593545cf78e5"} Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.182142 4717 scope.go:117] "RemoveContainer" containerID="af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.184066 4717 generic.go:334] "Generic (PLEG): container finished" podID="4ee2df25-8a54-4608-b82e-41edda414d2b" containerID="191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b" exitCode=0 Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.184101 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.184190 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" event={"ID":"4ee2df25-8a54-4608-b82e-41edda414d2b","Type":"ContainerDied","Data":"191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b"} Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.184256 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-gkzjm" event={"ID":"4ee2df25-8a54-4608-b82e-41edda414d2b","Type":"ContainerDied","Data":"7ddb797a4c90a33ebdb9948910e7a9de2b9fbbd95b3201575aa497483b76d2ad"} Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.203270 4717 scope.go:117] "RemoveContainer" containerID="af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d" Oct 02 14:33:24 crc kubenswrapper[4717]: E1002 14:33:24.204020 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d\": container with ID starting with af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d not found: ID does not exist" containerID="af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.204064 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d"} err="failed to get container status \"af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d\": rpc error: code = NotFound desc = could not find container \"af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d\": container with ID starting with af87f88b5f6e1c9a06d7223d6c4e048de2b7e8a164147f2a0927c2edcd9f187d not found: ID does not exist" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.204094 4717 scope.go:117] "RemoveContainer" containerID="191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b" Oct 02 
14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.226365 4717 scope.go:117] "RemoveContainer" containerID="191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b" Oct 02 14:33:24 crc kubenswrapper[4717]: E1002 14:33:24.227110 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b\": container with ID starting with 191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b not found: ID does not exist" containerID="191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.227171 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b"} err="failed to get container status \"191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b\": rpc error: code = NotFound desc = could not find container \"191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b\": container with ID starting with 191f85080e943e1ac3edcefb68e429a2a6aef50abb99e79f9dc996ca50f13d9b not found: ID does not exist" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.246246 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gkzjm"] Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.254203 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-gkzjm"] Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.260127 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr"] Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.265897 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5zkkr"] Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.458031 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.557365 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlxdt\" (UniqueName: \"kubernetes.io/projected/9e697dbf-f458-4f6f-83af-57ef6086b720-kube-api-access-dlxdt\") pod \"9e697dbf-f458-4f6f-83af-57ef6086b720\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.557411 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-bundle\") pod \"9e697dbf-f458-4f6f-83af-57ef6086b720\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.557446 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-util\") pod \"9e697dbf-f458-4f6f-83af-57ef6086b720\" (UID: \"9e697dbf-f458-4f6f-83af-57ef6086b720\") " Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.558425 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-bundle" (OuterVolumeSpecName: "bundle") pod "9e697dbf-f458-4f6f-83af-57ef6086b720" (UID: "9e697dbf-f458-4f6f-83af-57ef6086b720"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.562061 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e697dbf-f458-4f6f-83af-57ef6086b720-kube-api-access-dlxdt" (OuterVolumeSpecName: "kube-api-access-dlxdt") pod "9e697dbf-f458-4f6f-83af-57ef6086b720" (UID: "9e697dbf-f458-4f6f-83af-57ef6086b720"). InnerVolumeSpecName "kube-api-access-dlxdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.658643 4717 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.658679 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlxdt\" (UniqueName: \"kubernetes.io/projected/9e697dbf-f458-4f6f-83af-57ef6086b720-kube-api-access-dlxdt\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.845747 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ee2df25-8a54-4608-b82e-41edda414d2b" path="/var/lib/kubelet/pods/4ee2df25-8a54-4608-b82e-41edda414d2b/volumes" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.846304 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aed4d244-0337-468e-a9d5-d9b0ea805a41" path="/var/lib/kubelet/pods/aed4d244-0337-468e-a9d5-d9b0ea805a41/volumes" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.868063 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-util" (OuterVolumeSpecName: "util") pod "9e697dbf-f458-4f6f-83af-57ef6086b720" (UID: "9e697dbf-f458-4f6f-83af-57ef6086b720"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:33:24 crc kubenswrapper[4717]: I1002 14:33:24.962076 4717 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9e697dbf-f458-4f6f-83af-57ef6086b720-util\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.046986 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5"] Oct 02 14:33:25 crc kubenswrapper[4717]: E1002 14:33:25.047255 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerName="extract" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.047271 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerName="extract" Oct 02 14:33:25 crc kubenswrapper[4717]: E1002 14:33:25.047286 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerName="util" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.047294 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerName="util" Oct 02 14:33:25 crc kubenswrapper[4717]: E1002 14:33:25.047313 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee2df25-8a54-4608-b82e-41edda414d2b" containerName="controller-manager" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.047321 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee2df25-8a54-4608-b82e-41edda414d2b" containerName="controller-manager" Oct 02 14:33:25 crc kubenswrapper[4717]: E1002 14:33:25.047336 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed4d244-0337-468e-a9d5-d9b0ea805a41" containerName="route-controller-manager" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.047347 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed4d244-0337-468e-a9d5-d9b0ea805a41" containerName="route-controller-manager" Oct 02 14:33:25 crc kubenswrapper[4717]: E1002 14:33:25.047358 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerName="pull" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.047366 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerName="pull" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.047527 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed4d244-0337-468e-a9d5-d9b0ea805a41" containerName="route-controller-manager" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.047547 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ee2df25-8a54-4608-b82e-41edda414d2b" containerName="controller-manager" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.047555 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e697dbf-f458-4f6f-83af-57ef6086b720" containerName="extract" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.048066 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.050174 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.050656 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.051090 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc"] Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.051796 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.057602 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.057899 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.058041 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.058169 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.058334 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.058420 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.059167 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.059315 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.059803 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.060203 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.064010 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5"] Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.071468 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.074327 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc"] Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.166079 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-config\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.166605 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-client-ca\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.166792 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d1931fa-d548-421e-b7b9-600e73bae177-config\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.166992 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-proxy-ca-bundles\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.167156 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d1931fa-d548-421e-b7b9-600e73bae177-serving-cert\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.167678 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4d1931fa-d548-421e-b7b9-600e73bae177-client-ca\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.167875 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4xbm\" (UniqueName: \"kubernetes.io/projected/4d1931fa-d548-421e-b7b9-600e73bae177-kube-api-access-s4xbm\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.168124 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnvwq\" (UniqueName: \"kubernetes.io/projected/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-kube-api-access-rnvwq\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.168341 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-serving-cert\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.200729 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" event={"ID":"9e697dbf-f458-4f6f-83af-57ef6086b720","Type":"ContainerDied","Data":"cb5a092db79e2c79bb75e3a4166a174ac02aeb31874f9dc924ba8e2db8c4f002"} Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.200774 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb5a092db79e2c79bb75e3a4166a174ac02aeb31874f9dc924ba8e2db8c4f002" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.200795 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.269667 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4d1931fa-d548-421e-b7b9-600e73bae177-client-ca\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.269759 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4xbm\" (UniqueName: \"kubernetes.io/projected/4d1931fa-d548-421e-b7b9-600e73bae177-kube-api-access-s4xbm\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.269788 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnvwq\" (UniqueName: \"kubernetes.io/projected/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-kube-api-access-rnvwq\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.269810 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-serving-cert\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.269852 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-config\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.269901 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-client-ca\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: 
\"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.269923 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d1931fa-d548-421e-b7b9-600e73bae177-config\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.269974 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-proxy-ca-bundles\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.270001 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d1931fa-d548-421e-b7b9-600e73bae177-serving-cert\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.270669 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4d1931fa-d548-421e-b7b9-600e73bae177-client-ca\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.271693 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d1931fa-d548-421e-b7b9-600e73bae177-config\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.271993 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-client-ca\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.272055 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-config\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.272329 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-proxy-ca-bundles\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.276916 4717 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d1931fa-d548-421e-b7b9-600e73bae177-serving-cert\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.287239 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-serving-cert\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.290424 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4xbm\" (UniqueName: \"kubernetes.io/projected/4d1931fa-d548-421e-b7b9-600e73bae177-kube-api-access-s4xbm\") pod \"route-controller-manager-8656dbc9b4-x5rg5\" (UID: \"4d1931fa-d548-421e-b7b9-600e73bae177\") " pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.292237 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnvwq\" (UniqueName: \"kubernetes.io/projected/9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1-kube-api-access-rnvwq\") pod \"controller-manager-7f9f8bb6cb-sqnfc\" (UID: \"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1\") " pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.383495 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.391510 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.613589 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5"] Oct 02 14:33:25 crc kubenswrapper[4717]: W1002 14:33:25.619985 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d1931fa_d548_421e_b7b9_600e73bae177.slice/crio-ad40eb7db42b35fb8644ce71e3720415bdd3d9d6c05169b2c8b3d885aec0c171 WatchSource:0}: Error finding container ad40eb7db42b35fb8644ce71e3720415bdd3d9d6c05169b2c8b3d885aec0c171: Status 404 returned error can't find the container with id ad40eb7db42b35fb8644ce71e3720415bdd3d9d6c05169b2c8b3d885aec0c171 Oct 02 14:33:25 crc kubenswrapper[4717]: I1002 14:33:25.682705 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc"] Oct 02 14:33:25 crc kubenswrapper[4717]: W1002 14:33:25.706541 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9379e145_2a80_4ec9_80a3_4d3bd1d1f1b1.slice/crio-5a381ac485dd0c37ea994322b9f57ad68cefbf6b952e1062b5b802181e1f99fe WatchSource:0}: Error finding container 5a381ac485dd0c37ea994322b9f57ad68cefbf6b952e1062b5b802181e1f99fe: Status 404 returned error can't find the container with id 5a381ac485dd0c37ea994322b9f57ad68cefbf6b952e1062b5b802181e1f99fe Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.207683 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" event={"ID":"4d1931fa-d548-421e-b7b9-600e73bae177","Type":"ContainerStarted","Data":"986a87bca42c9a18973b7d09c4fafeae931654376007a4b284e55a34c28cea84"} Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.207726 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" event={"ID":"4d1931fa-d548-421e-b7b9-600e73bae177","Type":"ContainerStarted","Data":"ad40eb7db42b35fb8644ce71e3720415bdd3d9d6c05169b2c8b3d885aec0c171"} Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.208883 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.210664 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" event={"ID":"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1","Type":"ContainerStarted","Data":"679e2353e09aeeb57cd47859476767f8b73be93a85c436669377177e4ee04c80"} Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.210688 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" event={"ID":"9379e145-2a80-4ec9-80a3-4d3bd1d1f1b1","Type":"ContainerStarted","Data":"5a381ac485dd0c37ea994322b9f57ad68cefbf6b952e1062b5b802181e1f99fe"} Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.210925 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.218784 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.233960 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" podStartSLOduration=3.233874717 podStartE2EDuration="3.233874717s" podCreationTimestamp="2025-10-02 14:33:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:33:26.230148915 +0000 UTC m=+757.082003371" watchObservedRunningTime="2025-10-02 14:33:26.233874717 +0000 UTC m=+757.085729173" Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.251865 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f9f8bb6cb-sqnfc" podStartSLOduration=3.25185005 podStartE2EDuration="3.25185005s" podCreationTimestamp="2025-10-02 14:33:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:33:26.249011282 +0000 UTC m=+757.100865728" watchObservedRunningTime="2025-10-02 14:33:26.25185005 +0000 UTC m=+757.103704496" Oct 02 14:33:26 crc kubenswrapper[4717]: I1002 14:33:26.500343 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-8656dbc9b4-x5rg5" Oct 02 14:33:31 crc kubenswrapper[4717]: I1002 14:33:31.823346 4717 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 02 14:33:31 crc kubenswrapper[4717]: I1002 14:33:31.943308 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr"] Oct 02 14:33:31 crc kubenswrapper[4717]: I1002 14:33:31.944168 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:31 crc kubenswrapper[4717]: I1002 14:33:31.945968 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Oct 02 14:33:31 crc kubenswrapper[4717]: I1002 14:33:31.961633 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 02 14:33:31 crc kubenswrapper[4717]: I1002 14:33:31.962259 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-lj98f" Oct 02 14:33:31 crc kubenswrapper[4717]: I1002 14:33:31.979041 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr"] Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.072117 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2r4l\" (UniqueName: \"kubernetes.io/projected/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-kube-api-access-j2r4l\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") " pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.072170 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-webhook-cert\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") " pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.072228 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-apiservice-cert\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") " pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.173564 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2r4l\" (UniqueName: \"kubernetes.io/projected/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-kube-api-access-j2r4l\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") " pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.174140 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-webhook-cert\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") " pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.174198 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-apiservice-cert\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") 
" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.184901 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-apiservice-cert\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") " pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.187491 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-webhook-cert\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") " pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.198760 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2r4l\" (UniqueName: \"kubernetes.io/projected/85760b2d-ab19-40a7-91a2-b9266f9ab4d1-kube-api-access-j2r4l\") pod \"mariadb-operator-controller-manager-7b6fb6cd76-5ddtr\" (UID: \"85760b2d-ab19-40a7-91a2-b9266f9ab4d1\") " pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.275464 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:32 crc kubenswrapper[4717]: I1002 14:33:32.686443 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr"] Oct 02 14:33:33 crc kubenswrapper[4717]: I1002 14:33:33.249392 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" event={"ID":"85760b2d-ab19-40a7-91a2-b9266f9ab4d1","Type":"ContainerStarted","Data":"0eb8acdff912c195d932c9ab87641553235f382518ae38f7ba2681c65872123f"} Oct 02 14:33:36 crc kubenswrapper[4717]: I1002 14:33:36.266159 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" event={"ID":"85760b2d-ab19-40a7-91a2-b9266f9ab4d1","Type":"ContainerStarted","Data":"cead3d17a364ec15b7153e7c7ed73c29b331905ae833ae0cc43488a617b2c43d"} Oct 02 14:33:39 crc kubenswrapper[4717]: I1002 14:33:39.294967 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" event={"ID":"85760b2d-ab19-40a7-91a2-b9266f9ab4d1","Type":"ContainerStarted","Data":"602fbb96141100cb459bd3d3da86419a7881d5f299a2fe94f61cea661d0b2c5c"} Oct 02 14:33:39 crc kubenswrapper[4717]: I1002 14:33:39.298816 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:39 crc kubenswrapper[4717]: I1002 14:33:39.321837 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" podStartSLOduration=2.726322951 podStartE2EDuration="8.321816807s" podCreationTimestamp="2025-10-02 14:33:31 +0000 UTC" firstStartedPulling="2025-10-02 14:33:32.696471582 +0000 UTC m=+763.548326028" lastFinishedPulling="2025-10-02 14:33:38.291965438 +0000 UTC 
m=+769.143819884" observedRunningTime="2025-10-02 14:33:39.31388512 +0000 UTC m=+770.165739576" watchObservedRunningTime="2025-10-02 14:33:39.321816807 +0000 UTC m=+770.173671263" Oct 02 14:33:40 crc kubenswrapper[4717]: I1002 14:33:40.304286 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-7b6fb6cd76-5ddtr" Oct 02 14:33:43 crc kubenswrapper[4717]: I1002 14:33:43.944006 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-2njqx"] Oct 02 14:33:43 crc kubenswrapper[4717]: I1002 14:33:43.945665 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-2njqx" Oct 02 14:33:43 crc kubenswrapper[4717]: I1002 14:33:43.948159 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-b4bx6" Oct 02 14:33:43 crc kubenswrapper[4717]: I1002 14:33:43.954079 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-2njqx"] Oct 02 14:33:44 crc kubenswrapper[4717]: I1002 14:33:44.026848 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hhdh\" (UniqueName: \"kubernetes.io/projected/a381f248-1d86-4696-ac55-8e7d3252ebf6-kube-api-access-8hhdh\") pod \"infra-operator-index-2njqx\" (UID: \"a381f248-1d86-4696-ac55-8e7d3252ebf6\") " pod="openstack-operators/infra-operator-index-2njqx" Oct 02 14:33:44 crc kubenswrapper[4717]: I1002 14:33:44.128456 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hhdh\" (UniqueName: \"kubernetes.io/projected/a381f248-1d86-4696-ac55-8e7d3252ebf6-kube-api-access-8hhdh\") pod \"infra-operator-index-2njqx\" (UID: \"a381f248-1d86-4696-ac55-8e7d3252ebf6\") " pod="openstack-operators/infra-operator-index-2njqx" Oct 02 14:33:44 crc kubenswrapper[4717]: I1002 14:33:44.146020 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hhdh\" (UniqueName: \"kubernetes.io/projected/a381f248-1d86-4696-ac55-8e7d3252ebf6-kube-api-access-8hhdh\") pod \"infra-operator-index-2njqx\" (UID: \"a381f248-1d86-4696-ac55-8e7d3252ebf6\") " pod="openstack-operators/infra-operator-index-2njqx" Oct 02 14:33:44 crc kubenswrapper[4717]: I1002 14:33:44.275818 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-2njqx" Oct 02 14:33:44 crc kubenswrapper[4717]: I1002 14:33:44.644689 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-2njqx"] Oct 02 14:33:45 crc kubenswrapper[4717]: I1002 14:33:45.325366 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2njqx" event={"ID":"a381f248-1d86-4696-ac55-8e7d3252ebf6","Type":"ContainerStarted","Data":"7f7aa0351b93fb74fc57efcc6c7b9e602881d18cd0ba487acb10777f5925edf6"} Oct 02 14:33:46 crc kubenswrapper[4717]: I1002 14:33:46.333727 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2njqx" event={"ID":"a381f248-1d86-4696-ac55-8e7d3252ebf6","Type":"ContainerStarted","Data":"b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539"} Oct 02 14:33:46 crc kubenswrapper[4717]: I1002 14:33:46.354173 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-2njqx" podStartSLOduration=2.328982843 podStartE2EDuration="3.354152024s" podCreationTimestamp="2025-10-02 14:33:43 +0000 UTC" firstStartedPulling="2025-10-02 14:33:44.651978972 +0000 UTC m=+775.503833418" lastFinishedPulling="2025-10-02 14:33:45.677148143 +0000 UTC m=+776.529002599" observedRunningTime="2025-10-02 14:33:46.350874224 +0000 UTC m=+777.202728670" watchObservedRunningTime="2025-10-02 14:33:46.354152024 +0000 UTC m=+777.206006470" Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.137403 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-2njqx"] Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.344617 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-2njqx" podUID="a381f248-1d86-4696-ac55-8e7d3252ebf6" containerName="registry-server" containerID="cri-o://b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539" gracePeriod=2 Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.620730 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.621181 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.621235 4717 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.622205 4717 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"70c25bc08fc344bcf8bb59d3376cd774b978162bd5593d10e8b7b82c8502396b"} pod="openshift-machine-config-operator/machine-config-daemon-sk55f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.622278 4717 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" containerID="cri-o://70c25bc08fc344bcf8bb59d3376cd774b978162bd5593d10e8b7b82c8502396b" gracePeriod=600 Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.756258 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-t8wzp"] Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.757022 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.761389 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-t8wzp"] Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.857565 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-2njqx" Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.888428 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5q9q\" (UniqueName: \"kubernetes.io/projected/0e56647e-922d-435a-a84d-1c1910c2391f-kube-api-access-q5q9q\") pod \"infra-operator-index-t8wzp\" (UID: \"0e56647e-922d-435a-a84d-1c1910c2391f\") " pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.990479 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hhdh\" (UniqueName: \"kubernetes.io/projected/a381f248-1d86-4696-ac55-8e7d3252ebf6-kube-api-access-8hhdh\") pod \"a381f248-1d86-4696-ac55-8e7d3252ebf6\" (UID: \"a381f248-1d86-4696-ac55-8e7d3252ebf6\") " Oct 02 14:33:48 crc kubenswrapper[4717]: I1002 14:33:48.990795 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5q9q\" (UniqueName: \"kubernetes.io/projected/0e56647e-922d-435a-a84d-1c1910c2391f-kube-api-access-q5q9q\") pod \"infra-operator-index-t8wzp\" (UID: \"0e56647e-922d-435a-a84d-1c1910c2391f\") " pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.006669 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a381f248-1d86-4696-ac55-8e7d3252ebf6-kube-api-access-8hhdh" (OuterVolumeSpecName: "kube-api-access-8hhdh") pod "a381f248-1d86-4696-ac55-8e7d3252ebf6" (UID: "a381f248-1d86-4696-ac55-8e7d3252ebf6"). InnerVolumeSpecName "kube-api-access-8hhdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.006979 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5q9q\" (UniqueName: \"kubernetes.io/projected/0e56647e-922d-435a-a84d-1c1910c2391f-kube-api-access-q5q9q\") pod \"infra-operator-index-t8wzp\" (UID: \"0e56647e-922d-435a-a84d-1c1910c2391f\") " pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.078236 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.091478 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hhdh\" (UniqueName: \"kubernetes.io/projected/a381f248-1d86-4696-ac55-8e7d3252ebf6-kube-api-access-8hhdh\") on node \"crc\" DevicePath \"\"" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.360465 4717 generic.go:334] "Generic (PLEG): container finished" podID="405aba30-0ff3-4fca-a5da-09c35263665d" containerID="70c25bc08fc344bcf8bb59d3376cd774b978162bd5593d10e8b7b82c8502396b" exitCode=0 Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.360520 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerDied","Data":"70c25bc08fc344bcf8bb59d3376cd774b978162bd5593d10e8b7b82c8502396b"} Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.360546 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"dc26c4013ef3dfdebc8448f602fd80cfb07a36d1bd8014fcfc1f4769626fbe3f"} Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.360561 4717 scope.go:117] "RemoveContainer" containerID="0e3040e8a9e53a53556b70fb7f1b6868f918c9f54853ff50605f472c544d4044" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.363788 4717 generic.go:334] "Generic (PLEG): container finished" podID="a381f248-1d86-4696-ac55-8e7d3252ebf6" containerID="b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539" exitCode=0 Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.363819 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2njqx" event={"ID":"a381f248-1d86-4696-ac55-8e7d3252ebf6","Type":"ContainerDied","Data":"b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539"} Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.363847 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-2njqx" event={"ID":"a381f248-1d86-4696-ac55-8e7d3252ebf6","Type":"ContainerDied","Data":"7f7aa0351b93fb74fc57efcc6c7b9e602881d18cd0ba487acb10777f5925edf6"} Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.363877 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-2njqx" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.389744 4717 scope.go:117] "RemoveContainer" containerID="b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.391235 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-2njqx"] Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.394152 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-2njqx"] Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.403361 4717 scope.go:117] "RemoveContainer" containerID="b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539" Oct 02 14:33:49 crc kubenswrapper[4717]: E1002 14:33:49.403742 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539\": container with ID starting with b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539 not found: ID does not exist" containerID="b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.403776 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539"} err="failed to get container status \"b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539\": rpc error: code = NotFound desc = could not find container \"b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539\": container with ID starting with b1568a88709e246ba0337f0dffc9a74c988f9e846fbfa2c283b5efdfd812c539 not found: ID does not exist" Oct 02 14:33:49 crc kubenswrapper[4717]: I1002 14:33:49.478711 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-t8wzp"] Oct 02 14:33:49 crc kubenswrapper[4717]: W1002 14:33:49.492321 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e56647e_922d_435a_a84d_1c1910c2391f.slice/crio-c0391a2fbe5646bb1782fccb4c7d12d6db6da6c9eeb6bc37538426697206f87c WatchSource:0}: Error finding container c0391a2fbe5646bb1782fccb4c7d12d6db6da6c9eeb6bc37538426697206f87c: Status 404 returned error can't find the container with id c0391a2fbe5646bb1782fccb4c7d12d6db6da6c9eeb6bc37538426697206f87c Oct 02 14:33:50 crc kubenswrapper[4717]: I1002 14:33:50.371201 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-t8wzp" event={"ID":"0e56647e-922d-435a-a84d-1c1910c2391f","Type":"ContainerStarted","Data":"3aedc51f4e6e436c8dff5d90daa0478767c4e7f40ae138a1a0ddb3037c1ce774"} Oct 02 14:33:50 crc kubenswrapper[4717]: I1002 14:33:50.371541 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-t8wzp" event={"ID":"0e56647e-922d-435a-a84d-1c1910c2391f","Type":"ContainerStarted","Data":"c0391a2fbe5646bb1782fccb4c7d12d6db6da6c9eeb6bc37538426697206f87c"} Oct 02 14:33:50 crc kubenswrapper[4717]: I1002 14:33:50.383213 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-t8wzp" podStartSLOduration=1.810544593 podStartE2EDuration="2.383198155s" podCreationTimestamp="2025-10-02 14:33:48 +0000 UTC" firstStartedPulling="2025-10-02 14:33:49.495889721 
+0000 UTC m=+780.347744167" lastFinishedPulling="2025-10-02 14:33:50.068543283 +0000 UTC m=+780.920397729" observedRunningTime="2025-10-02 14:33:50.382519287 +0000 UTC m=+781.234373733" watchObservedRunningTime="2025-10-02 14:33:50.383198155 +0000 UTC m=+781.235052601" Oct 02 14:33:50 crc kubenswrapper[4717]: I1002 14:33:50.847076 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a381f248-1d86-4696-ac55-8e7d3252ebf6" path="/var/lib/kubelet/pods/a381f248-1d86-4696-ac55-8e7d3252ebf6/volumes" Oct 02 14:33:59 crc kubenswrapper[4717]: I1002 14:33:59.078679 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:33:59 crc kubenswrapper[4717]: I1002 14:33:59.079664 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:33:59 crc kubenswrapper[4717]: I1002 14:33:59.114456 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:33:59 crc kubenswrapper[4717]: I1002 14:33:59.500021 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-t8wzp" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.585169 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2"] Oct 02 14:34:06 crc kubenswrapper[4717]: E1002 14:34:06.585997 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a381f248-1d86-4696-ac55-8e7d3252ebf6" containerName="registry-server" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.586015 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="a381f248-1d86-4696-ac55-8e7d3252ebf6" containerName="registry-server" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.586168 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="a381f248-1d86-4696-ac55-8e7d3252ebf6" containerName="registry-server" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.587150 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.590345 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dzqss" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.596868 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2"] Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.680201 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-util\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.680266 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh7sg\" (UniqueName: \"kubernetes.io/projected/29ecebf2-e728-4f9c-8f0b-060eda32da1e-kube-api-access-gh7sg\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.680408 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-bundle\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.781507 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-util\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.781552 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh7sg\" (UniqueName: \"kubernetes.io/projected/29ecebf2-e728-4f9c-8f0b-060eda32da1e-kube-api-access-gh7sg\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.781594 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-bundle\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.782187 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-bundle\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.782182 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-util\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.810598 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh7sg\" (UniqueName: \"kubernetes.io/projected/29ecebf2-e728-4f9c-8f0b-060eda32da1e-kube-api-access-gh7sg\") pod \"ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:06 crc kubenswrapper[4717]: I1002 14:34:06.910417 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:07 crc kubenswrapper[4717]: I1002 14:34:07.368890 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2"] Oct 02 14:34:07 crc kubenswrapper[4717]: W1002 14:34:07.380168 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29ecebf2_e728_4f9c_8f0b_060eda32da1e.slice/crio-7c82d1671c260d13882314f4c3d5cdf00e542a0c2c38c4f21cb6068a412e4f8c WatchSource:0}: Error finding container 7c82d1671c260d13882314f4c3d5cdf00e542a0c2c38c4f21cb6068a412e4f8c: Status 404 returned error can't find the container with id 7c82d1671c260d13882314f4c3d5cdf00e542a0c2c38c4f21cb6068a412e4f8c Oct 02 14:34:07 crc kubenswrapper[4717]: I1002 14:34:07.494640 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" event={"ID":"29ecebf2-e728-4f9c-8f0b-060eda32da1e","Type":"ContainerStarted","Data":"7c82d1671c260d13882314f4c3d5cdf00e542a0c2c38c4f21cb6068a412e4f8c"} Oct 02 14:34:08 crc kubenswrapper[4717]: I1002 14:34:08.502205 4717 generic.go:334] "Generic (PLEG): container finished" podID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerID="440ec9c36ea620848f2f5d58c52cb09d5ffb1a8c1012848a71154625773fe7c4" exitCode=0 Oct 02 14:34:08 crc kubenswrapper[4717]: I1002 14:34:08.502246 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" event={"ID":"29ecebf2-e728-4f9c-8f0b-060eda32da1e","Type":"ContainerDied","Data":"440ec9c36ea620848f2f5d58c52cb09d5ffb1a8c1012848a71154625773fe7c4"} Oct 02 14:34:08 crc kubenswrapper[4717]: I1002 14:34:08.753648 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7rq"] Oct 02 14:34:08 crc kubenswrapper[4717]: I1002 14:34:08.755271 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:08 crc kubenswrapper[4717]: I1002 14:34:08.759594 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7rq"] Oct 02 14:34:08 crc kubenswrapper[4717]: I1002 14:34:08.905391 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-catalog-content\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:08 crc kubenswrapper[4717]: I1002 14:34:08.905461 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-utilities\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:08 crc kubenswrapper[4717]: I1002 14:34:08.905600 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlr87\" (UniqueName: \"kubernetes.io/projected/07451fa9-6b32-496c-b4f2-8021140c4d54-kube-api-access-wlr87\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.006576 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-catalog-content\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.006628 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-utilities\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.006690 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlr87\" (UniqueName: \"kubernetes.io/projected/07451fa9-6b32-496c-b4f2-8021140c4d54-kube-api-access-wlr87\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.007714 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-catalog-content\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.007756 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-utilities\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.030823 4717 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wlr87\" (UniqueName: \"kubernetes.io/projected/07451fa9-6b32-496c-b4f2-8021140c4d54-kube-api-access-wlr87\") pod \"redhat-marketplace-lf7rq\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.070950 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.511038 4717 generic.go:334] "Generic (PLEG): container finished" podID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerID="892676f312a5b236e83a37a99a3e96d8eada9bcde5603e584de6ff30233803cc" exitCode=0 Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.511193 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" event={"ID":"29ecebf2-e728-4f9c-8f0b-060eda32da1e","Type":"ContainerDied","Data":"892676f312a5b236e83a37a99a3e96d8eada9bcde5603e584de6ff30233803cc"} Oct 02 14:34:09 crc kubenswrapper[4717]: I1002 14:34:09.528058 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7rq"] Oct 02 14:34:10 crc kubenswrapper[4717]: I1002 14:34:10.518676 4717 generic.go:334] "Generic (PLEG): container finished" podID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerID="51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb" exitCode=0 Oct 02 14:34:10 crc kubenswrapper[4717]: I1002 14:34:10.518843 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7rq" event={"ID":"07451fa9-6b32-496c-b4f2-8021140c4d54","Type":"ContainerDied","Data":"51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb"} Oct 02 14:34:10 crc kubenswrapper[4717]: I1002 14:34:10.519272 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7rq" event={"ID":"07451fa9-6b32-496c-b4f2-8021140c4d54","Type":"ContainerStarted","Data":"a08da67209ef5f8502fc6848f5f1e7b3d7d952a9e7ec035a8f32771a6365aab6"} Oct 02 14:34:10 crc kubenswrapper[4717]: I1002 14:34:10.524463 4717 generic.go:334] "Generic (PLEG): container finished" podID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerID="9d62bc711ceb546440b7cd1e22e4bfef61ffa2fc3edd5761fc29436280efba79" exitCode=0 Oct 02 14:34:10 crc kubenswrapper[4717]: I1002 14:34:10.524542 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" event={"ID":"29ecebf2-e728-4f9c-8f0b-060eda32da1e","Type":"ContainerDied","Data":"9d62bc711ceb546440b7cd1e22e4bfef61ffa2fc3edd5761fc29436280efba79"} Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.534023 4717 generic.go:334] "Generic (PLEG): container finished" podID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerID="1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386" exitCode=0 Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.534102 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7rq" event={"ID":"07451fa9-6b32-496c-b4f2-8021140c4d54","Type":"ContainerDied","Data":"1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386"} Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.824763 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.946639 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-util\") pod \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.946704 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-bundle\") pod \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.946797 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh7sg\" (UniqueName: \"kubernetes.io/projected/29ecebf2-e728-4f9c-8f0b-060eda32da1e-kube-api-access-gh7sg\") pod \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\" (UID: \"29ecebf2-e728-4f9c-8f0b-060eda32da1e\") " Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.948585 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-bundle" (OuterVolumeSpecName: "bundle") pod "29ecebf2-e728-4f9c-8f0b-060eda32da1e" (UID: "29ecebf2-e728-4f9c-8f0b-060eda32da1e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.954103 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29ecebf2-e728-4f9c-8f0b-060eda32da1e-kube-api-access-gh7sg" (OuterVolumeSpecName: "kube-api-access-gh7sg") pod "29ecebf2-e728-4f9c-8f0b-060eda32da1e" (UID: "29ecebf2-e728-4f9c-8f0b-060eda32da1e"). InnerVolumeSpecName "kube-api-access-gh7sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:34:11 crc kubenswrapper[4717]: I1002 14:34:11.968906 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-util" (OuterVolumeSpecName: "util") pod "29ecebf2-e728-4f9c-8f0b-060eda32da1e" (UID: "29ecebf2-e728-4f9c-8f0b-060eda32da1e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.048961 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh7sg\" (UniqueName: \"kubernetes.io/projected/29ecebf2-e728-4f9c-8f0b-060eda32da1e-kube-api-access-gh7sg\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.049025 4717 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-util\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.049050 4717 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/29ecebf2-e728-4f9c-8f0b-060eda32da1e-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.149616 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hfdm8"] Oct 02 14:34:12 crc kubenswrapper[4717]: E1002 14:34:12.150391 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerName="extract" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.150423 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerName="extract" Oct 02 14:34:12 crc kubenswrapper[4717]: E1002 14:34:12.150453 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerName="util" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.150466 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerName="util" Oct 02 14:34:12 crc kubenswrapper[4717]: E1002 14:34:12.150492 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerName="pull" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.150505 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerName="pull" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.150748 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="29ecebf2-e728-4f9c-8f0b-060eda32da1e" containerName="extract" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.152572 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.163656 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hfdm8"] Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.252190 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-catalog-content\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.252272 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-utilities\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.252298 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w42qs\" (UniqueName: \"kubernetes.io/projected/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-kube-api-access-w42qs\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.353829 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-catalog-content\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.353921 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-utilities\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.353969 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w42qs\" (UniqueName: \"kubernetes.io/projected/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-kube-api-access-w42qs\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.354922 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-catalog-content\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.354953 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-utilities\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.373599 4717 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w42qs\" (UniqueName: \"kubernetes.io/projected/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-kube-api-access-w42qs\") pod \"community-operators-hfdm8\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.496989 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.557845 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" event={"ID":"29ecebf2-e728-4f9c-8f0b-060eda32da1e","Type":"ContainerDied","Data":"7c82d1671c260d13882314f4c3d5cdf00e542a0c2c38c4f21cb6068a412e4f8c"} Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.557886 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c82d1671c260d13882314f4c3d5cdf00e542a0c2c38c4f21cb6068a412e4f8c" Oct 02 14:34:12 crc kubenswrapper[4717]: I1002 14:34:12.558865 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2" Oct 02 14:34:13 crc kubenswrapper[4717]: I1002 14:34:13.016487 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hfdm8"] Oct 02 14:34:13 crc kubenswrapper[4717]: W1002 14:34:13.022265 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod788b7ab6_8dc3_478f_98f2_402b8f0ca3dc.slice/crio-ac11d800181b99d24504b7b929be34e855e6bc1c56c4af12a2bea41c83336d1f WatchSource:0}: Error finding container ac11d800181b99d24504b7b929be34e855e6bc1c56c4af12a2bea41c83336d1f: Status 404 returned error can't find the container with id ac11d800181b99d24504b7b929be34e855e6bc1c56c4af12a2bea41c83336d1f Oct 02 14:34:13 crc kubenswrapper[4717]: I1002 14:34:13.571347 4717 generic.go:334] "Generic (PLEG): container finished" podID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerID="03cca56571727012af34e15403c2223046348a313b223e742f4dfb3b6179d576" exitCode=0 Oct 02 14:34:13 crc kubenswrapper[4717]: I1002 14:34:13.571411 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hfdm8" event={"ID":"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc","Type":"ContainerDied","Data":"03cca56571727012af34e15403c2223046348a313b223e742f4dfb3b6179d576"} Oct 02 14:34:13 crc kubenswrapper[4717]: I1002 14:34:13.571473 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hfdm8" event={"ID":"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc","Type":"ContainerStarted","Data":"ac11d800181b99d24504b7b929be34e855e6bc1c56c4af12a2bea41c83336d1f"} Oct 02 14:34:13 crc kubenswrapper[4717]: I1002 14:34:13.577025 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7rq" event={"ID":"07451fa9-6b32-496c-b4f2-8021140c4d54","Type":"ContainerStarted","Data":"598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc"} Oct 02 14:34:13 crc kubenswrapper[4717]: I1002 14:34:13.623103 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lf7rq" podStartSLOduration=3.725388452 podStartE2EDuration="5.623078622s" podCreationTimestamp="2025-10-02 14:34:08 +0000 UTC" 
firstStartedPulling="2025-10-02 14:34:10.521981327 +0000 UTC m=+801.373835783" lastFinishedPulling="2025-10-02 14:34:12.419671507 +0000 UTC m=+803.271525953" observedRunningTime="2025-10-02 14:34:13.618730262 +0000 UTC m=+804.470584728" watchObservedRunningTime="2025-10-02 14:34:13.623078622 +0000 UTC m=+804.474933078" Oct 02 14:34:14 crc kubenswrapper[4717]: I1002 14:34:14.583707 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hfdm8" event={"ID":"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc","Type":"ContainerStarted","Data":"5c75dc167f7b5a97cdde79484977e745d07482f3d4a4d0ad9cc208d18d6165ee"} Oct 02 14:34:15 crc kubenswrapper[4717]: I1002 14:34:15.594696 4717 generic.go:334] "Generic (PLEG): container finished" podID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerID="5c75dc167f7b5a97cdde79484977e745d07482f3d4a4d0ad9cc208d18d6165ee" exitCode=0 Oct 02 14:34:15 crc kubenswrapper[4717]: I1002 14:34:15.594830 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hfdm8" event={"ID":"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc","Type":"ContainerDied","Data":"5c75dc167f7b5a97cdde79484977e745d07482f3d4a4d0ad9cc208d18d6165ee"} Oct 02 14:34:16 crc kubenswrapper[4717]: I1002 14:34:16.604560 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hfdm8" event={"ID":"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc","Type":"ContainerStarted","Data":"0338f453617473b0c852ea1d400e839151c4a2cd76eec911f5a72349833b5ced"} Oct 02 14:34:16 crc kubenswrapper[4717]: I1002 14:34:16.621455 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hfdm8" podStartSLOduration=2.190918352 podStartE2EDuration="4.621422412s" podCreationTimestamp="2025-10-02 14:34:12 +0000 UTC" firstStartedPulling="2025-10-02 14:34:13.575054926 +0000 UTC m=+804.426909382" lastFinishedPulling="2025-10-02 14:34:16.005558986 +0000 UTC m=+806.857413442" observedRunningTime="2025-10-02 14:34:16.620307611 +0000 UTC m=+807.472162067" watchObservedRunningTime="2025-10-02 14:34:16.621422412 +0000 UTC m=+807.473276858" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.071636 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.072050 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.135731 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.214284 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct"] Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.215140 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.220118 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.226528 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-tkcrp" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.246567 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct"] Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.359346 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/882db0a2-0e31-4202-8286-9435da5165a9-webhook-cert\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.359573 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cks5t\" (UniqueName: \"kubernetes.io/projected/882db0a2-0e31-4202-8286-9435da5165a9-kube-api-access-cks5t\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.359826 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/882db0a2-0e31-4202-8286-9435da5165a9-apiservice-cert\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.460749 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/882db0a2-0e31-4202-8286-9435da5165a9-webhook-cert\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.460816 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cks5t\" (UniqueName: \"kubernetes.io/projected/882db0a2-0e31-4202-8286-9435da5165a9-kube-api-access-cks5t\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.460906 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/882db0a2-0e31-4202-8286-9435da5165a9-apiservice-cert\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.479692 4717 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/882db0a2-0e31-4202-8286-9435da5165a9-apiservice-cert\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.480599 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/882db0a2-0e31-4202-8286-9435da5165a9-webhook-cert\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.483264 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cks5t\" (UniqueName: \"kubernetes.io/projected/882db0a2-0e31-4202-8286-9435da5165a9-kube-api-access-cks5t\") pod \"infra-operator-controller-manager-7595d7c777-4hnct\" (UID: \"882db0a2-0e31-4202-8286-9435da5165a9\") " pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.539036 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:19 crc kubenswrapper[4717]: I1002 14:34:19.716508 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:20 crc kubenswrapper[4717]: I1002 14:34:20.012489 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct"] Oct 02 14:34:20 crc kubenswrapper[4717]: I1002 14:34:20.637767 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" event={"ID":"882db0a2-0e31-4202-8286-9435da5165a9","Type":"ContainerStarted","Data":"fe744f74f4f278a99170e8cbb6fc36833aeae287baafb07ef2bb885e6cda8933"} Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.497872 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.498298 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.537736 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.659953 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" event={"ID":"882db0a2-0e31-4202-8286-9435da5165a9","Type":"ContainerStarted","Data":"ad10a102e917ad58e02010410d098f2e26f31396807bf14ccf12233408ff8da3"} Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.660012 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" event={"ID":"882db0a2-0e31-4202-8286-9435da5165a9","Type":"ContainerStarted","Data":"f385c2b375ac7002ef9019e6e81a396cf396f0270cb7d247ff06b4fd6d24569a"} Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.693595 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" podStartSLOduration=2.006937003 podStartE2EDuration="3.693565168s" podCreationTimestamp="2025-10-02 14:34:19 +0000 UTC" firstStartedPulling="2025-10-02 14:34:20.015282069 +0000 UTC m=+810.867136515" lastFinishedPulling="2025-10-02 14:34:21.701910234 +0000 UTC m=+812.553764680" observedRunningTime="2025-10-02 14:34:22.687467791 +0000 UTC m=+813.539322237" watchObservedRunningTime="2025-10-02 14:34:22.693565168 +0000 UTC m=+813.545419614" Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.704076 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.937524 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7rq"] Oct 02 14:34:22 crc kubenswrapper[4717]: I1002 14:34:22.937973 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lf7rq" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerName="registry-server" containerID="cri-o://598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc" gracePeriod=2 Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.361863 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.535143 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-catalog-content\") pod \"07451fa9-6b32-496c-b4f2-8021140c4d54\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.535238 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-utilities\") pod \"07451fa9-6b32-496c-b4f2-8021140c4d54\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.535322 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlr87\" (UniqueName: \"kubernetes.io/projected/07451fa9-6b32-496c-b4f2-8021140c4d54-kube-api-access-wlr87\") pod \"07451fa9-6b32-496c-b4f2-8021140c4d54\" (UID: \"07451fa9-6b32-496c-b4f2-8021140c4d54\") " Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.537165 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-utilities" (OuterVolumeSpecName: "utilities") pod "07451fa9-6b32-496c-b4f2-8021140c4d54" (UID: "07451fa9-6b32-496c-b4f2-8021140c4d54"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.545879 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07451fa9-6b32-496c-b4f2-8021140c4d54-kube-api-access-wlr87" (OuterVolumeSpecName: "kube-api-access-wlr87") pod "07451fa9-6b32-496c-b4f2-8021140c4d54" (UID: "07451fa9-6b32-496c-b4f2-8021140c4d54"). InnerVolumeSpecName "kube-api-access-wlr87". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.552127 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07451fa9-6b32-496c-b4f2-8021140c4d54" (UID: "07451fa9-6b32-496c-b4f2-8021140c4d54"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.636535 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlr87\" (UniqueName: \"kubernetes.io/projected/07451fa9-6b32-496c-b4f2-8021140c4d54-kube-api-access-wlr87\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.636573 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.636590 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07451fa9-6b32-496c-b4f2-8021140c4d54-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.668534 4717 generic.go:334] "Generic (PLEG): container finished" podID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerID="598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc" exitCode=0 Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.668596 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7rq" event={"ID":"07451fa9-6b32-496c-b4f2-8021140c4d54","Type":"ContainerDied","Data":"598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc"} Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.668644 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lf7rq" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.668666 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7rq" event={"ID":"07451fa9-6b32-496c-b4f2-8021140c4d54","Type":"ContainerDied","Data":"a08da67209ef5f8502fc6848f5f1e7b3d7d952a9e7ec035a8f32771a6365aab6"} Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.668688 4717 scope.go:117] "RemoveContainer" containerID="598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.668996 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.690671 4717 scope.go:117] "RemoveContainer" containerID="1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.708146 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7rq"] Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.713570 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7rq"] Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.735473 4717 scope.go:117] "RemoveContainer" containerID="51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.756678 4717 scope.go:117] "RemoveContainer" containerID="598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc" Oct 02 14:34:23 crc kubenswrapper[4717]: E1002 14:34:23.757361 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc\": container with ID starting with 598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc not found: ID does not exist" containerID="598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.757390 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc"} err="failed to get container status \"598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc\": rpc error: code = NotFound desc = could not find container \"598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc\": container with ID starting with 598e3aef55a764a62858b7d7f5e9e433bd8e4a1cd001532ac93f4842732a70dc not found: ID does not exist" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.757414 4717 scope.go:117] "RemoveContainer" containerID="1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386" Oct 02 14:34:23 crc kubenswrapper[4717]: E1002 14:34:23.757812 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386\": container with ID starting with 1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386 not found: ID does not exist" containerID="1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.757849 4717 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386"} err="failed to get container status \"1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386\": rpc error: code = NotFound desc = could not find container \"1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386\": container with ID starting with 1b53e1752571c4e15e7f8151a53f7d4423eb31333811c1280110d5f96a7a2386 not found: ID does not exist" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.757875 4717 scope.go:117] "RemoveContainer" containerID="51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb" Oct 02 14:34:23 crc kubenswrapper[4717]: E1002 14:34:23.758339 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb\": container with ID starting with 51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb not found: ID does not exist" containerID="51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb" Oct 02 14:34:23 crc kubenswrapper[4717]: I1002 14:34:23.758387 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb"} err="failed to get container status \"51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb\": rpc error: code = NotFound desc = could not find container \"51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb\": container with ID starting with 51502a3d6a1693a399b3b7e2bf1324e191ced860c484de063dc8d17928a883fb not found: ID does not exist" Oct 02 14:34:24 crc kubenswrapper[4717]: I1002 14:34:24.846656 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" path="/var/lib/kubelet/pods/07451fa9-6b32-496c-b4f2-8021140c4d54/volumes" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.337184 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hfdm8"] Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.337470 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hfdm8" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerName="registry-server" containerID="cri-o://0338f453617473b0c852ea1d400e839151c4a2cd76eec911f5a72349833b5ced" gracePeriod=2 Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.505676 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Oct 02 14:34:27 crc kubenswrapper[4717]: E1002 14:34:27.506229 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerName="extract-utilities" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.506245 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerName="extract-utilities" Oct 02 14:34:27 crc kubenswrapper[4717]: E1002 14:34:27.506254 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerName="extract-content" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.506259 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerName="extract-content" Oct 02 14:34:27 crc kubenswrapper[4717]: E1002 14:34:27.506275 4717 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerName="registry-server" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.506281 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerName="registry-server" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.506391 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="07451fa9-6b32-496c-b4f2-8021140c4d54" containerName="registry-server" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.507062 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.510355 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openshift-service-ca.crt" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.510723 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"galera-openstack-dockercfg-h9xd2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.510736 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"kube-root-ca.crt" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.510736 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config-data" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.510876 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"osp-secret" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.511224 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.517740 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.518916 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.521953 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.522781 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.528980 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.533654 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.547362 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.592495 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-kolla-config\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.592619 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c299m\" (UniqueName: \"kubernetes.io/projected/5de254a6-3038-4865-b5b3-2efd0b6fe371-kube-api-access-c299m\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.592795 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5de254a6-3038-4865-b5b3-2efd0b6fe371-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.592836 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-config-data-default\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.592915 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/5de254a6-3038-4865-b5b3-2efd0b6fe371-secrets\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.592992 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.593039 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694096 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-operator-scripts\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694134 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-kolla-config\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694155 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694176 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/50f14d7b-2533-4238-ae56-1416dd65e626-config-data-generated\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694199 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-kolla-config\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694223 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694244 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-kolla-config\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694265 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694283 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j827z\" (UniqueName: \"kubernetes.io/projected/50f14d7b-2533-4238-ae56-1416dd65e626-kube-api-access-j827z\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694304 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c299m\" (UniqueName: 
\"kubernetes.io/projected/5de254a6-3038-4865-b5b3-2efd0b6fe371-kube-api-access-c299m\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694323 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/27e47c56-7639-484b-a693-b8cb67491d57-secrets\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694340 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694361 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/50f14d7b-2533-4238-ae56-1416dd65e626-secrets\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694384 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-config-data-default\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694399 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5de254a6-3038-4865-b5b3-2efd0b6fe371-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694416 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-config-data-default\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694437 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-operator-scripts\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694494 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") device mount path \"/mnt/openstack/pv12\"" pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694662 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-config-data-default\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694753 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/5de254a6-3038-4865-b5b3-2efd0b6fe371-secrets\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694834 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/27e47c56-7639-484b-a693-b8cb67491d57-config-data-generated\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694885 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlplp\" (UniqueName: \"kubernetes.io/projected/27e47c56-7639-484b-a693-b8cb67491d57-kube-api-access-tlplp\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.694838 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5de254a6-3038-4865-b5b3-2efd0b6fe371-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.696375 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-config-data-default\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.696615 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-kolla-config\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.696777 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5de254a6-3038-4865-b5b3-2efd0b6fe371-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.700674 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/5de254a6-3038-4865-b5b3-2efd0b6fe371-secrets\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.710660 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c299m\" (UniqueName: \"kubernetes.io/projected/5de254a6-3038-4865-b5b3-2efd0b6fe371-kube-api-access-c299m\") pod \"openstack-galera-0\" (UID: 
\"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.714676 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"5de254a6-3038-4865-b5b3-2efd0b6fe371\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.717206 4717 generic.go:334] "Generic (PLEG): container finished" podID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerID="0338f453617473b0c852ea1d400e839151c4a2cd76eec911f5a72349833b5ced" exitCode=0 Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.717252 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hfdm8" event={"ID":"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc","Type":"ContainerDied","Data":"0338f453617473b0c852ea1d400e839151c4a2cd76eec911f5a72349833b5ced"} Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797147 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlplp\" (UniqueName: \"kubernetes.io/projected/27e47c56-7639-484b-a693-b8cb67491d57-kube-api-access-tlplp\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797240 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-operator-scripts\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797272 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-kolla-config\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797302 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/50f14d7b-2533-4238-ae56-1416dd65e626-config-data-generated\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797336 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-kolla-config\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797370 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797399 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j827z\" (UniqueName: \"kubernetes.io/projected/50f14d7b-2533-4238-ae56-1416dd65e626-kube-api-access-j827z\") pod 
\"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797442 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/27e47c56-7639-484b-a693-b8cb67491d57-secrets\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797467 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797505 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/50f14d7b-2533-4238-ae56-1416dd65e626-secrets\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797538 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-config-data-default\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797592 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-operator-scripts\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797624 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-config-data-default\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.797671 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/27e47c56-7639-484b-a693-b8cb67491d57-config-data-generated\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.798319 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") device mount path \"/mnt/openstack/pv05\"" pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.798951 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-kolla-config\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 
14:34:27.799108 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") device mount path \"/mnt/openstack/pv01\"" pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.799129 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/27e47c56-7639-484b-a693-b8cb67491d57-config-data-generated\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.799205 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-kolla-config\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.799291 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-config-data-default\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.799843 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27e47c56-7639-484b-a693-b8cb67491d57-operator-scripts\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.799843 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-operator-scripts\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.800344 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/50f14d7b-2533-4238-ae56-1416dd65e626-config-data-default\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.800964 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/50f14d7b-2533-4238-ae56-1416dd65e626-config-data-generated\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.803700 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/27e47c56-7639-484b-a693-b8cb67491d57-secrets\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.809320 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: 
\"kubernetes.io/secret/50f14d7b-2533-4238-ae56-1416dd65e626-secrets\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.815638 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlplp\" (UniqueName: \"kubernetes.io/projected/27e47c56-7639-484b-a693-b8cb67491d57-kube-api-access-tlplp\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.816642 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j827z\" (UniqueName: \"kubernetes.io/projected/50f14d7b-2533-4238-ae56-1416dd65e626-kube-api-access-j827z\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.822763 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-1\" (UID: \"27e47c56-7639-484b-a693-b8cb67491d57\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.826377 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"50f14d7b-2533-4238-ae56-1416dd65e626\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.838229 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.845853 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:27 crc kubenswrapper[4717]: I1002 14:34:27.862168 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.281633 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.306790 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.389105 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.392394 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Oct 02 14:34:28 crc kubenswrapper[4717]: W1002 14:34:28.398926 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50f14d7b_2533_4238_ae56_1416dd65e626.slice/crio-6c153af9aa301a299cd24c36bbd92635fcc77c40da19302ce7e203ad298717e5 WatchSource:0}: Error finding container 6c153af9aa301a299cd24c36bbd92635fcc77c40da19302ce7e203ad298717e5: Status 404 returned error can't find the container with id 6c153af9aa301a299cd24c36bbd92635fcc77c40da19302ce7e203ad298717e5 Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.407640 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w42qs\" (UniqueName: \"kubernetes.io/projected/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-kube-api-access-w42qs\") pod \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.407725 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-utilities\") pod \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.407767 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-catalog-content\") pod \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\" (UID: \"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc\") " Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.411982 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-utilities" (OuterVolumeSpecName: "utilities") pod "788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" (UID: "788b7ab6-8dc3-478f-98f2-402b8f0ca3dc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.413081 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-kube-api-access-w42qs" (OuterVolumeSpecName: "kube-api-access-w42qs") pod "788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" (UID: "788b7ab6-8dc3-478f-98f2-402b8f0ca3dc"). InnerVolumeSpecName "kube-api-access-w42qs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.464340 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" (UID: "788b7ab6-8dc3-478f-98f2-402b8f0ca3dc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.509125 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w42qs\" (UniqueName: \"kubernetes.io/projected/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-kube-api-access-w42qs\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.509167 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.509177 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.724104 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"27e47c56-7639-484b-a693-b8cb67491d57","Type":"ContainerStarted","Data":"4763a658811629e4773d9197ba7a5ad733af6de663e04286b28a340c0c1143c2"} Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.727137 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hfdm8" event={"ID":"788b7ab6-8dc3-478f-98f2-402b8f0ca3dc","Type":"ContainerDied","Data":"ac11d800181b99d24504b7b929be34e855e6bc1c56c4af12a2bea41c83336d1f"} Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.727232 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hfdm8" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.727364 4717 scope.go:117] "RemoveContainer" containerID="0338f453617473b0c852ea1d400e839151c4a2cd76eec911f5a72349833b5ced" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.730478 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"5de254a6-3038-4865-b5b3-2efd0b6fe371","Type":"ContainerStarted","Data":"85c21badf93d26ced8fe377ca2c05900b0bd916b84aa0d77f769482ae1103509"} Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.733530 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"50f14d7b-2533-4238-ae56-1416dd65e626","Type":"ContainerStarted","Data":"6c153af9aa301a299cd24c36bbd92635fcc77c40da19302ce7e203ad298717e5"} Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.746184 4717 scope.go:117] "RemoveContainer" containerID="5c75dc167f7b5a97cdde79484977e745d07482f3d4a4d0ad9cc208d18d6165ee" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.764224 4717 scope.go:117] "RemoveContainer" containerID="03cca56571727012af34e15403c2223046348a313b223e742f4dfb3b6179d576" Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.778206 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hfdm8"] Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.784740 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hfdm8"] Oct 02 14:34:28 crc kubenswrapper[4717]: I1002 14:34:28.847026 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" path="/var/lib/kubelet/pods/788b7ab6-8dc3-478f-98f2-402b8f0ca3dc/volumes" Oct 02 14:34:29 crc kubenswrapper[4717]: I1002 14:34:29.543848 4717 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7595d7c777-4hnct" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.754091 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-22l68"] Oct 02 14:34:35 crc kubenswrapper[4717]: E1002 14:34:35.755814 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerName="registry-server" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.755832 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerName="registry-server" Oct 02 14:34:35 crc kubenswrapper[4717]: E1002 14:34:35.755852 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerName="extract-content" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.755860 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerName="extract-content" Oct 02 14:34:35 crc kubenswrapper[4717]: E1002 14:34:35.755873 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerName="extract-utilities" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.755881 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerName="extract-utilities" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.756032 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="788b7ab6-8dc3-478f-98f2-402b8f0ca3dc" containerName="registry-server" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.757031 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.760546 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-22l68"] Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.938666 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc5t8\" (UniqueName: \"kubernetes.io/projected/41631e25-77b3-41c1-a8eb-f2dfe128fa94-kube-api-access-jc5t8\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.939029 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-utilities\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:35 crc kubenswrapper[4717]: I1002 14:34:35.939126 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-catalog-content\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.041036 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-catalog-content\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.041123 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc5t8\" (UniqueName: \"kubernetes.io/projected/41631e25-77b3-41c1-a8eb-f2dfe128fa94-kube-api-access-jc5t8\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.041149 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-utilities\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.041529 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-catalog-content\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.041596 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-utilities\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.060965 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jc5t8\" (UniqueName: \"kubernetes.io/projected/41631e25-77b3-41c1-a8eb-f2dfe128fa94-kube-api-access-jc5t8\") pod \"redhat-operators-22l68\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.079423 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.819590 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/memcached-0"] Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.820302 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.822543 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"memcached-config-data" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.822733 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"memcached-memcached-dockercfg-7br2q" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.833355 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/memcached-0"] Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.967747 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c19128ae-cc4b-47e3-a572-88003d524ea2-kolla-config\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.968803 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c19128ae-cc4b-47e3-a572-88003d524ea2-config-data\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:36 crc kubenswrapper[4717]: I1002 14:34:36.968854 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9dpf\" (UniqueName: \"kubernetes.io/projected/c19128ae-cc4b-47e3-a572-88003d524ea2-kube-api-access-t9dpf\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:37 crc kubenswrapper[4717]: I1002 14:34:37.070100 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c19128ae-cc4b-47e3-a572-88003d524ea2-kolla-config\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:37 crc kubenswrapper[4717]: I1002 14:34:37.070163 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c19128ae-cc4b-47e3-a572-88003d524ea2-config-data\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:37 crc kubenswrapper[4717]: I1002 14:34:37.070195 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9dpf\" (UniqueName: \"kubernetes.io/projected/c19128ae-cc4b-47e3-a572-88003d524ea2-kube-api-access-t9dpf\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 
02 14:34:37 crc kubenswrapper[4717]: I1002 14:34:37.071220 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c19128ae-cc4b-47e3-a572-88003d524ea2-kolla-config\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:37 crc kubenswrapper[4717]: I1002 14:34:37.071412 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c19128ae-cc4b-47e3-a572-88003d524ea2-config-data\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:37 crc kubenswrapper[4717]: I1002 14:34:37.085826 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9dpf\" (UniqueName: \"kubernetes.io/projected/c19128ae-cc4b-47e3-a572-88003d524ea2-kube-api-access-t9dpf\") pod \"memcached-0\" (UID: \"c19128ae-cc4b-47e3-a572-88003d524ea2\") " pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:37 crc kubenswrapper[4717]: I1002 14:34:37.134818 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.346118 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-lck67"] Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.347329 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.349364 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-krfgv" Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.355258 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-lck67"] Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.501252 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4fv4\" (UniqueName: \"kubernetes.io/projected/c2ce2ae9-a067-4415-9152-182a699d3772-kube-api-access-w4fv4\") pod \"rabbitmq-cluster-operator-index-lck67\" (UID: \"c2ce2ae9-a067-4415-9152-182a699d3772\") " pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.603085 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4fv4\" (UniqueName: \"kubernetes.io/projected/c2ce2ae9-a067-4415-9152-182a699d3772-kube-api-access-w4fv4\") pod \"rabbitmq-cluster-operator-index-lck67\" (UID: \"c2ce2ae9-a067-4415-9152-182a699d3772\") " pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.638828 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4fv4\" (UniqueName: \"kubernetes.io/projected/c2ce2ae9-a067-4415-9152-182a699d3772-kube-api-access-w4fv4\") pod \"rabbitmq-cluster-operator-index-lck67\" (UID: \"c2ce2ae9-a067-4415-9152-182a699d3772\") " pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.769398 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:39 crc kubenswrapper[4717]: I1002 14:34:39.978374 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/memcached-0"] Oct 02 14:34:39 crc kubenswrapper[4717]: W1002 14:34:39.997678 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc19128ae_cc4b_47e3_a572_88003d524ea2.slice/crio-1bba3ccdeed931a0ddb61ea4e8c1161df1d1da85aa5bedc9d39d9b66b3d12302 WatchSource:0}: Error finding container 1bba3ccdeed931a0ddb61ea4e8c1161df1d1da85aa5bedc9d39d9b66b3d12302: Status 404 returned error can't find the container with id 1bba3ccdeed931a0ddb61ea4e8c1161df1d1da85aa5bedc9d39d9b66b3d12302 Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.043191 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-22l68"] Oct 02 14:34:40 crc kubenswrapper[4717]: W1002 14:34:40.054019 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41631e25_77b3_41c1_a8eb_f2dfe128fa94.slice/crio-fe4b3162cc558f508634cd318232c908a85f4db99d04565f2c8417b52c60473c WatchSource:0}: Error finding container fe4b3162cc558f508634cd318232c908a85f4db99d04565f2c8417b52c60473c: Status 404 returned error can't find the container with id fe4b3162cc558f508634cd318232c908a85f4db99d04565f2c8417b52c60473c Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.173890 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-lck67"] Oct 02 14:34:40 crc kubenswrapper[4717]: E1002 14:34:40.311108 4717 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41631e25_77b3_41c1_a8eb_f2dfe128fa94.slice/crio-conmon-6556dbbf015b6e0507702ce23422ea824a03bc03b6ced157125e8f2b10e26a0c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod41631e25_77b3_41c1_a8eb_f2dfe128fa94.slice/crio-6556dbbf015b6e0507702ce23422ea824a03bc03b6ced157125e8f2b10e26a0c.scope\": RecentStats: unable to find data in memory cache]" Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.812130 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/memcached-0" event={"ID":"c19128ae-cc4b-47e3-a572-88003d524ea2","Type":"ContainerStarted","Data":"1bba3ccdeed931a0ddb61ea4e8c1161df1d1da85aa5bedc9d39d9b66b3d12302"} Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.813832 4717 generic.go:334] "Generic (PLEG): container finished" podID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerID="6556dbbf015b6e0507702ce23422ea824a03bc03b6ced157125e8f2b10e26a0c" exitCode=0 Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.813870 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22l68" event={"ID":"41631e25-77b3-41c1-a8eb-f2dfe128fa94","Type":"ContainerDied","Data":"6556dbbf015b6e0507702ce23422ea824a03bc03b6ced157125e8f2b10e26a0c"} Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.813921 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22l68" event={"ID":"41631e25-77b3-41c1-a8eb-f2dfe128fa94","Type":"ContainerStarted","Data":"fe4b3162cc558f508634cd318232c908a85f4db99d04565f2c8417b52c60473c"} Oct 02 14:34:40 
crc kubenswrapper[4717]: I1002 14:34:40.814897 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" event={"ID":"c2ce2ae9-a067-4415-9152-182a699d3772","Type":"ContainerStarted","Data":"58af0252b645001f5318308a1445ee017d47eb9ac29f47c7bceae1d9e2952fdc"} Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.818879 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"5de254a6-3038-4865-b5b3-2efd0b6fe371","Type":"ContainerStarted","Data":"968d668c7ba4b359cb81240603fab99ee682ccec3769b8e235bb417eb452ec01"} Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.820526 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"50f14d7b-2533-4238-ae56-1416dd65e626","Type":"ContainerStarted","Data":"2a763e1c118c28ae9832bd0159380fb8770efa120ecd347de21bacfc21d48f24"} Oct 02 14:34:40 crc kubenswrapper[4717]: I1002 14:34:40.822496 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"27e47c56-7639-484b-a693-b8cb67491d57","Type":"ContainerStarted","Data":"c07eca6b3cf8256768f24239d3f812e26282b425f38ca69961d944dea32a805d"} Oct 02 14:34:43 crc kubenswrapper[4717]: I1002 14:34:43.844616 4717 generic.go:334] "Generic (PLEG): container finished" podID="50f14d7b-2533-4238-ae56-1416dd65e626" containerID="2a763e1c118c28ae9832bd0159380fb8770efa120ecd347de21bacfc21d48f24" exitCode=0 Oct 02 14:34:43 crc kubenswrapper[4717]: I1002 14:34:43.844680 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"50f14d7b-2533-4238-ae56-1416dd65e626","Type":"ContainerDied","Data":"2a763e1c118c28ae9832bd0159380fb8770efa120ecd347de21bacfc21d48f24"} Oct 02 14:34:43 crc kubenswrapper[4717]: I1002 14:34:43.849103 4717 generic.go:334] "Generic (PLEG): container finished" podID="27e47c56-7639-484b-a693-b8cb67491d57" containerID="c07eca6b3cf8256768f24239d3f812e26282b425f38ca69961d944dea32a805d" exitCode=0 Oct 02 14:34:43 crc kubenswrapper[4717]: I1002 14:34:43.849170 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"27e47c56-7639-484b-a693-b8cb67491d57","Type":"ContainerDied","Data":"c07eca6b3cf8256768f24239d3f812e26282b425f38ca69961d944dea32a805d"} Oct 02 14:34:43 crc kubenswrapper[4717]: I1002 14:34:43.851361 4717 generic.go:334] "Generic (PLEG): container finished" podID="5de254a6-3038-4865-b5b3-2efd0b6fe371" containerID="968d668c7ba4b359cb81240603fab99ee682ccec3769b8e235bb417eb452ec01" exitCode=0 Oct 02 14:34:43 crc kubenswrapper[4717]: I1002 14:34:43.851394 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"5de254a6-3038-4865-b5b3-2efd0b6fe371","Type":"ContainerDied","Data":"968d668c7ba4b359cb81240603fab99ee682ccec3769b8e235bb417eb452ec01"} Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.857466 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"50f14d7b-2533-4238-ae56-1416dd65e626","Type":"ContainerStarted","Data":"7d0846903921a4c012d2ff1b1ccd83c10a2f817cb25a8a1c8a58abbf3d349cae"} Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.859736 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" 
event={"ID":"27e47c56-7639-484b-a693-b8cb67491d57","Type":"ContainerStarted","Data":"dfaf68e1d584994e373c6f595316900029f65736c9449f3f4a7053ccf4af15bc"} Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.862536 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/memcached-0" event={"ID":"c19128ae-cc4b-47e3-a572-88003d524ea2","Type":"ContainerStarted","Data":"e7837e1418d55deea2e0145ca6b8e3e543f62722109a53433ace17ae157bed30"} Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.862800 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.864554 4717 generic.go:334] "Generic (PLEG): container finished" podID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerID="63f346444d4b11a80cd088461a4f4af4a2b16fa045a38f016c7f3d0291eaad5e" exitCode=0 Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.864597 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22l68" event={"ID":"41631e25-77b3-41c1-a8eb-f2dfe128fa94","Type":"ContainerDied","Data":"63f346444d4b11a80cd088461a4f4af4a2b16fa045a38f016c7f3d0291eaad5e"} Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.866879 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" event={"ID":"c2ce2ae9-a067-4415-9152-182a699d3772","Type":"ContainerStarted","Data":"a14a07364b6dc8c148ed65180bea5cace4927c38a2e21015b4ae128e85db55fc"} Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.869588 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"5de254a6-3038-4865-b5b3-2efd0b6fe371","Type":"ContainerStarted","Data":"a279a1efad4dfeccaa916fe8c03039752a1ffd707e0259438033d753f2655ab0"} Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.876145 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-2" podStartSLOduration=7.381274217 podStartE2EDuration="18.876122433s" podCreationTimestamp="2025-10-02 14:34:26 +0000 UTC" firstStartedPulling="2025-10-02 14:34:28.401613036 +0000 UTC m=+819.253467482" lastFinishedPulling="2025-10-02 14:34:39.896461252 +0000 UTC m=+830.748315698" observedRunningTime="2025-10-02 14:34:44.875600158 +0000 UTC m=+835.727454604" watchObservedRunningTime="2025-10-02 14:34:44.876122433 +0000 UTC m=+835.727976879" Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.898440 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" podStartSLOduration=2.221717536 podStartE2EDuration="5.898418884s" podCreationTimestamp="2025-10-02 14:34:39 +0000 UTC" firstStartedPulling="2025-10-02 14:34:40.191773404 +0000 UTC m=+831.043627850" lastFinishedPulling="2025-10-02 14:34:43.868474752 +0000 UTC m=+834.720329198" observedRunningTime="2025-10-02 14:34:44.890032614 +0000 UTC m=+835.741887060" watchObservedRunningTime="2025-10-02 14:34:44.898418884 +0000 UTC m=+835.750273330" Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.915674 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-0" podStartSLOduration=7.166827431 podStartE2EDuration="18.915655656s" podCreationTimestamp="2025-10-02 14:34:26 +0000 UTC" firstStartedPulling="2025-10-02 14:34:28.308374812 +0000 UTC m=+819.160229258" lastFinishedPulling="2025-10-02 14:34:40.057203037 +0000 UTC 
m=+830.909057483" observedRunningTime="2025-10-02 14:34:44.912432068 +0000 UTC m=+835.764286514" watchObservedRunningTime="2025-10-02 14:34:44.915655656 +0000 UTC m=+835.767510102" Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.932562 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/memcached-0" podStartSLOduration=6.395788858 podStartE2EDuration="8.932540299s" podCreationTimestamp="2025-10-02 14:34:36 +0000 UTC" firstStartedPulling="2025-10-02 14:34:40.000511413 +0000 UTC m=+830.852365879" lastFinishedPulling="2025-10-02 14:34:42.537262874 +0000 UTC m=+833.389117320" observedRunningTime="2025-10-02 14:34:44.92965736 +0000 UTC m=+835.781511816" watchObservedRunningTime="2025-10-02 14:34:44.932540299 +0000 UTC m=+835.784394745" Oct 02 14:34:44 crc kubenswrapper[4717]: I1002 14:34:44.970551 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-1" podStartSLOduration=7.392270498 podStartE2EDuration="18.97053514s" podCreationTimestamp="2025-10-02 14:34:26 +0000 UTC" firstStartedPulling="2025-10-02 14:34:28.396138976 +0000 UTC m=+819.247993422" lastFinishedPulling="2025-10-02 14:34:39.974403618 +0000 UTC m=+830.826258064" observedRunningTime="2025-10-02 14:34:44.965767589 +0000 UTC m=+835.817622035" watchObservedRunningTime="2025-10-02 14:34:44.97053514 +0000 UTC m=+835.822389586" Oct 02 14:34:45 crc kubenswrapper[4717]: I1002 14:34:45.878873 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22l68" event={"ID":"41631e25-77b3-41c1-a8eb-f2dfe128fa94","Type":"ContainerStarted","Data":"03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891"} Oct 02 14:34:45 crc kubenswrapper[4717]: I1002 14:34:45.906204 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-22l68" podStartSLOduration=6.476299352 podStartE2EDuration="10.906173377s" podCreationTimestamp="2025-10-02 14:34:35 +0000 UTC" firstStartedPulling="2025-10-02 14:34:40.815804434 +0000 UTC m=+831.667658880" lastFinishedPulling="2025-10-02 14:34:45.245678449 +0000 UTC m=+836.097532905" observedRunningTime="2025-10-02 14:34:45.901298814 +0000 UTC m=+836.753153260" watchObservedRunningTime="2025-10-02 14:34:45.906173377 +0000 UTC m=+836.758027823" Oct 02 14:34:46 crc kubenswrapper[4717]: I1002 14:34:46.080447 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:46 crc kubenswrapper[4717]: I1002 14:34:46.080490 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:47 crc kubenswrapper[4717]: I1002 14:34:47.132954 4717 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-22l68" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="registry-server" probeResult="failure" output=< Oct 02 14:34:47 crc kubenswrapper[4717]: timeout: failed to connect service ":50051" within 1s Oct 02 14:34:47 crc kubenswrapper[4717]: > Oct 02 14:34:47 crc kubenswrapper[4717]: I1002 14:34:47.838512 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:47 crc kubenswrapper[4717]: I1002 14:34:47.838780 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:34:47 crc kubenswrapper[4717]: 
I1002 14:34:47.846599 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:47 crc kubenswrapper[4717]: I1002 14:34:47.846669 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:34:47 crc kubenswrapper[4717]: I1002 14:34:47.862570 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:47 crc kubenswrapper[4717]: I1002 14:34:47.862630 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:34:49 crc kubenswrapper[4717]: I1002 14:34:49.770386 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:49 crc kubenswrapper[4717]: I1002 14:34:49.770459 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:49 crc kubenswrapper[4717]: I1002 14:34:49.818749 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:49 crc kubenswrapper[4717]: I1002 14:34:49.926402 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-lck67" Oct 02 14:34:52 crc kubenswrapper[4717]: I1002 14:34:52.136208 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/memcached-0" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.580652 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc"] Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.582234 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.585589 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dzqss" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.592352 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.592479 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.592506 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8mnd\" (UniqueName: \"kubernetes.io/projected/0a5398f8-f537-4676-a305-844c9975fb95-kube-api-access-r8mnd\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.600264 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc"] Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.694261 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.694749 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.694777 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8mnd\" (UniqueName: \"kubernetes.io/projected/0a5398f8-f537-4676-a305-844c9975fb95-kube-api-access-r8mnd\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.695383 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.695427 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.717825 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8mnd\" (UniqueName: \"kubernetes.io/projected/0a5398f8-f537-4676-a305-844c9975fb95-kube-api-access-r8mnd\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:53 crc kubenswrapper[4717]: I1002 14:34:53.947364 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:34:54 crc kubenswrapper[4717]: I1002 14:34:54.406138 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc"] Oct 02 14:34:54 crc kubenswrapper[4717]: I1002 14:34:54.942175 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" event={"ID":"0a5398f8-f537-4676-a305-844c9975fb95","Type":"ContainerStarted","Data":"20494cda9b6307d23c77e996d5175434eef6ea8f05d5e17200f501868a76b167"} Oct 02 14:34:54 crc kubenswrapper[4717]: I1002 14:34:54.942579 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" event={"ID":"0a5398f8-f537-4676-a305-844c9975fb95","Type":"ContainerStarted","Data":"f9f0b94e613d186d62c8a1cb4e9b169f8ca088a22da92aa27d1e87f28b9b1476"} Oct 02 14:34:56 crc kubenswrapper[4717]: I1002 14:34:56.134579 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:56 crc kubenswrapper[4717]: I1002 14:34:56.191753 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.354635 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6kb75"] Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.356504 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.374329 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6kb75"] Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.385509 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-utilities\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.385577 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-catalog-content\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.385638 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75l9j\" (UniqueName: \"kubernetes.io/projected/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-kube-api-access-75l9j\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.486703 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75l9j\" (UniqueName: \"kubernetes.io/projected/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-kube-api-access-75l9j\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.486812 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-utilities\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.486853 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-catalog-content\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.487491 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-catalog-content\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.488148 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-utilities\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.530082 4717 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-75l9j\" (UniqueName: \"kubernetes.io/projected/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-kube-api-access-75l9j\") pod \"certified-operators-6kb75\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:34:59 crc kubenswrapper[4717]: I1002 14:34:59.680389 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:35:00 crc kubenswrapper[4717]: I1002 14:35:00.147808 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6kb75"] Oct 02 14:35:00 crc kubenswrapper[4717]: I1002 14:35:00.993964 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6kb75" event={"ID":"b93ada37-fdd7-451f-9df2-3fc3c8d1764a","Type":"ContainerStarted","Data":"57d62775fd2867b8bbfd27c560085d2891cd5940b6f50eeff1f9849a2be5375b"} Oct 02 14:35:03 crc kubenswrapper[4717]: I1002 14:35:03.939047 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-22l68"] Oct 02 14:35:03 crc kubenswrapper[4717]: I1002 14:35:03.939587 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-22l68" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="registry-server" containerID="cri-o://03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891" gracePeriod=2 Oct 02 14:35:06 crc kubenswrapper[4717]: I1002 14:35:06.036228 4717 generic.go:334] "Generic (PLEG): container finished" podID="0a5398f8-f537-4676-a305-844c9975fb95" containerID="20494cda9b6307d23c77e996d5175434eef6ea8f05d5e17200f501868a76b167" exitCode=0 Oct 02 14:35:06 crc kubenswrapper[4717]: I1002 14:35:06.036352 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" event={"ID":"0a5398f8-f537-4676-a305-844c9975fb95","Type":"ContainerDied","Data":"20494cda9b6307d23c77e996d5175434eef6ea8f05d5e17200f501868a76b167"} Oct 02 14:35:06 crc kubenswrapper[4717]: E1002 14:35:06.081621 4717 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891 is running failed: container process not found" containerID="03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891" cmd=["grpc_health_probe","-addr=:50051"] Oct 02 14:35:06 crc kubenswrapper[4717]: E1002 14:35:06.082122 4717 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891 is running failed: container process not found" containerID="03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891" cmd=["grpc_health_probe","-addr=:50051"] Oct 02 14:35:06 crc kubenswrapper[4717]: E1002 14:35:06.082376 4717 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891 is running failed: container process not found" containerID="03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891" cmd=["grpc_health_probe","-addr=:50051"] Oct 02 14:35:06 crc kubenswrapper[4717]: E1002 
14:35:06.082453 4717 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-22l68" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="registry-server" Oct 02 14:35:07 crc kubenswrapper[4717]: I1002 14:35:07.057886 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6kb75" event={"ID":"b93ada37-fdd7-451f-9df2-3fc3c8d1764a","Type":"ContainerStarted","Data":"4f18a0078d55f79d49beb022e08a01817ed4f01c3889fd19a031832083979bb7"} Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.065782 4717 generic.go:334] "Generic (PLEG): container finished" podID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerID="4f18a0078d55f79d49beb022e08a01817ed4f01c3889fd19a031832083979bb7" exitCode=0 Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.065867 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6kb75" event={"ID":"b93ada37-fdd7-451f-9df2-3fc3c8d1764a","Type":"ContainerDied","Data":"4f18a0078d55f79d49beb022e08a01817ed4f01c3889fd19a031832083979bb7"} Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.068748 4717 generic.go:334] "Generic (PLEG): container finished" podID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerID="03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891" exitCode=0 Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.068771 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22l68" event={"ID":"41631e25-77b3-41c1-a8eb-f2dfe128fa94","Type":"ContainerDied","Data":"03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891"} Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.884044 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.919036 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc5t8\" (UniqueName: \"kubernetes.io/projected/41631e25-77b3-41c1-a8eb-f2dfe128fa94-kube-api-access-jc5t8\") pod \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.919087 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-utilities\") pod \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.919105 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-catalog-content\") pod \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\" (UID: \"41631e25-77b3-41c1-a8eb-f2dfe128fa94\") " Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.920593 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-utilities" (OuterVolumeSpecName: "utilities") pod "41631e25-77b3-41c1-a8eb-f2dfe128fa94" (UID: "41631e25-77b3-41c1-a8eb-f2dfe128fa94"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.932575 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41631e25-77b3-41c1-a8eb-f2dfe128fa94-kube-api-access-jc5t8" (OuterVolumeSpecName: "kube-api-access-jc5t8") pod "41631e25-77b3-41c1-a8eb-f2dfe128fa94" (UID: "41631e25-77b3-41c1-a8eb-f2dfe128fa94"). InnerVolumeSpecName "kube-api-access-jc5t8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:35:08 crc kubenswrapper[4717]: I1002 14:35:08.994425 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "41631e25-77b3-41c1-a8eb-f2dfe128fa94" (UID: "41631e25-77b3-41c1-a8eb-f2dfe128fa94"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.020557 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.020590 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/41631e25-77b3-41c1-a8eb-f2dfe128fa94-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.020604 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc5t8\" (UniqueName: \"kubernetes.io/projected/41631e25-77b3-41c1-a8eb-f2dfe128fa94-kube-api-access-jc5t8\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.076023 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-22l68" Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.078043 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-22l68" event={"ID":"41631e25-77b3-41c1-a8eb-f2dfe128fa94","Type":"ContainerDied","Data":"fe4b3162cc558f508634cd318232c908a85f4db99d04565f2c8417b52c60473c"} Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.078105 4717 scope.go:117] "RemoveContainer" containerID="03e4b67f1ff7dd10f262089ebf764a1c3c8573f3cec6866df801948eb14c9891" Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.104813 4717 scope.go:117] "RemoveContainer" containerID="63f346444d4b11a80cd088461a4f4af4a2b16fa045a38f016c7f3d0291eaad5e" Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.122475 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-22l68"] Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.138061 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-22l68"] Oct 02 14:35:09 crc kubenswrapper[4717]: I1002 14:35:09.166601 4717 scope.go:117] "RemoveContainer" containerID="6556dbbf015b6e0507702ce23422ea824a03bc03b6ced157125e8f2b10e26a0c" Oct 02 14:35:10 crc kubenswrapper[4717]: I1002 14:35:10.847610 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" path="/var/lib/kubelet/pods/41631e25-77b3-41c1-a8eb-f2dfe128fa94/volumes" Oct 02 14:35:18 crc kubenswrapper[4717]: I1002 14:35:18.133227 4717 generic.go:334] "Generic (PLEG): container finished" podID="0a5398f8-f537-4676-a305-844c9975fb95" containerID="4b0d23e11ea85edbde8103de14cd6b1ff941744a010e8a752b0422e44db26721" exitCode=0 Oct 02 14:35:18 crc kubenswrapper[4717]: I1002 14:35:18.133343 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" event={"ID":"0a5398f8-f537-4676-a305-844c9975fb95","Type":"ContainerDied","Data":"4b0d23e11ea85edbde8103de14cd6b1ff941744a010e8a752b0422e44db26721"} Oct 02 14:35:18 crc kubenswrapper[4717]: I1002 14:35:18.135607 4717 generic.go:334] "Generic (PLEG): container finished" podID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerID="27cec4b762e71320a4e2534eba7fb992c877dd950ca1cd4efcc56e6e955ca017" exitCode=0 Oct 02 14:35:18 crc kubenswrapper[4717]: I1002 14:35:18.135656 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6kb75" event={"ID":"b93ada37-fdd7-451f-9df2-3fc3c8d1764a","Type":"ContainerDied","Data":"27cec4b762e71320a4e2534eba7fb992c877dd950ca1cd4efcc56e6e955ca017"} Oct 02 14:35:19 crc kubenswrapper[4717]: I1002 14:35:19.144235 4717 generic.go:334] "Generic (PLEG): container finished" podID="0a5398f8-f537-4676-a305-844c9975fb95" containerID="59813e5d54d9cf28ed35380e5f92930d8692bca107bd95c5b77d38151c6879d0" exitCode=0 Oct 02 14:35:19 crc kubenswrapper[4717]: I1002 14:35:19.144311 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" event={"ID":"0a5398f8-f537-4676-a305-844c9975fb95","Type":"ContainerDied","Data":"59813e5d54d9cf28ed35380e5f92930d8692bca107bd95c5b77d38151c6879d0"} Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.051891 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 
14:35:20.093347 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-2" Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.164763 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6kb75" event={"ID":"b93ada37-fdd7-451f-9df2-3fc3c8d1764a","Type":"ContainerStarted","Data":"f018aab85719322d29ed1bdf3fd87767004a5c70dacfe216166fdb7f3b6a28f4"} Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.190112 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6kb75" podStartSLOduration=11.090295231 podStartE2EDuration="21.190094823s" podCreationTimestamp="2025-10-02 14:34:59 +0000 UTC" firstStartedPulling="2025-10-02 14:35:09.078055487 +0000 UTC m=+859.929909933" lastFinishedPulling="2025-10-02 14:35:19.177855049 +0000 UTC m=+870.029709525" observedRunningTime="2025-10-02 14:35:20.188815268 +0000 UTC m=+871.040669714" watchObservedRunningTime="2025-10-02 14:35:20.190094823 +0000 UTC m=+871.041949269" Oct 02 14:35:20 crc kubenswrapper[4717]: E1002 14:35:20.321078 4717 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.110:50926->38.102.83.110:38315: write tcp 38.102.83.110:50926->38.102.83.110:38315: write: broken pipe Oct 02 14:35:20 crc kubenswrapper[4717]: E1002 14:35:20.423724 4717 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.110:50952->38.102.83.110:38315: write tcp 38.102.83.110:50952->38.102.83.110:38315: write: broken pipe Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.511904 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.607626 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-bundle\") pod \"0a5398f8-f537-4676-a305-844c9975fb95\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.607668 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-util\") pod \"0a5398f8-f537-4676-a305-844c9975fb95\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.607792 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8mnd\" (UniqueName: \"kubernetes.io/projected/0a5398f8-f537-4676-a305-844c9975fb95-kube-api-access-r8mnd\") pod \"0a5398f8-f537-4676-a305-844c9975fb95\" (UID: \"0a5398f8-f537-4676-a305-844c9975fb95\") " Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.608512 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-bundle" (OuterVolumeSpecName: "bundle") pod "0a5398f8-f537-4676-a305-844c9975fb95" (UID: "0a5398f8-f537-4676-a305-844c9975fb95"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.616179 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a5398f8-f537-4676-a305-844c9975fb95-kube-api-access-r8mnd" (OuterVolumeSpecName: "kube-api-access-r8mnd") pod "0a5398f8-f537-4676-a305-844c9975fb95" (UID: "0a5398f8-f537-4676-a305-844c9975fb95"). InnerVolumeSpecName "kube-api-access-r8mnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.623988 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-util" (OuterVolumeSpecName: "util") pod "0a5398f8-f537-4676-a305-844c9975fb95" (UID: "0a5398f8-f537-4676-a305-844c9975fb95"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.708968 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8mnd\" (UniqueName: \"kubernetes.io/projected/0a5398f8-f537-4676-a305-844c9975fb95-kube-api-access-r8mnd\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.709012 4717 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:20 crc kubenswrapper[4717]: I1002 14:35:20.709023 4717 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0a5398f8-f537-4676-a305-844c9975fb95-util\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:20 crc kubenswrapper[4717]: E1002 14:35:20.960519 4717 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a5398f8_f537_4676_a305_844c9975fb95.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a5398f8_f537_4676_a305_844c9975fb95.slice/crio-f9f0b94e613d186d62c8a1cb4e9b169f8ca088a22da92aa27d1e87f28b9b1476\": RecentStats: unable to find data in memory cache]" Oct 02 14:35:21 crc kubenswrapper[4717]: I1002 14:35:21.173588 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" event={"ID":"0a5398f8-f537-4676-a305-844c9975fb95","Type":"ContainerDied","Data":"f9f0b94e613d186d62c8a1cb4e9b169f8ca088a22da92aa27d1e87f28b9b1476"} Oct 02 14:35:21 crc kubenswrapper[4717]: I1002 14:35:21.173629 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc" Oct 02 14:35:21 crc kubenswrapper[4717]: I1002 14:35:21.173640 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9f0b94e613d186d62c8a1cb4e9b169f8ca088a22da92aa27d1e87f28b9b1476" Oct 02 14:35:26 crc kubenswrapper[4717]: I1002 14:35:26.784360 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:35:26 crc kubenswrapper[4717]: I1002 14:35:26.834345 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-0" Oct 02 14:35:27 crc kubenswrapper[4717]: I1002 14:35:27.892397 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="glance-kuttl-tests/openstack-galera-2" podUID="50f14d7b-2533-4238-ae56-1416dd65e626" containerName="galera" probeResult="failure" output=< Oct 02 14:35:27 crc kubenswrapper[4717]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Oct 02 14:35:27 crc kubenswrapper[4717]: > Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.680814 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.680862 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.724768 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.741866 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5"] Oct 02 14:35:29 crc kubenswrapper[4717]: E1002 14:35:29.742106 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="extract-content" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742121 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="extract-content" Oct 02 14:35:29 crc kubenswrapper[4717]: E1002 14:35:29.742139 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="extract-utilities" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742148 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="extract-utilities" Oct 02 14:35:29 crc kubenswrapper[4717]: E1002 14:35:29.742157 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a5398f8-f537-4676-a305-844c9975fb95" containerName="util" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742164 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a5398f8-f537-4676-a305-844c9975fb95" containerName="util" Oct 02 14:35:29 crc kubenswrapper[4717]: E1002 14:35:29.742178 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a5398f8-f537-4676-a305-844c9975fb95" containerName="pull" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742184 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a5398f8-f537-4676-a305-844c9975fb95" containerName="pull" Oct 02 14:35:29 crc kubenswrapper[4717]: E1002 14:35:29.742196 4717 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="registry-server" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742203 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="registry-server" Oct 02 14:35:29 crc kubenswrapper[4717]: E1002 14:35:29.742225 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a5398f8-f537-4676-a305-844c9975fb95" containerName="extract" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742233 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a5398f8-f537-4676-a305-844c9975fb95" containerName="extract" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742356 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a5398f8-f537-4676-a305-844c9975fb95" containerName="extract" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742378 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="41631e25-77b3-41c1-a8eb-f2dfe128fa94" containerName="registry-server" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.742865 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.746715 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-cm849" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.752498 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5"] Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.861147 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkrsg\" (UniqueName: \"kubernetes.io/projected/4848f26f-88d7-4cf6-b271-f419c946be0f-kube-api-access-zkrsg\") pod \"rabbitmq-cluster-operator-779fc9694b-t85n5\" (UID: \"4848f26f-88d7-4cf6-b271-f419c946be0f\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.962593 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkrsg\" (UniqueName: \"kubernetes.io/projected/4848f26f-88d7-4cf6-b271-f419c946be0f-kube-api-access-zkrsg\") pod \"rabbitmq-cluster-operator-779fc9694b-t85n5\" (UID: \"4848f26f-88d7-4cf6-b271-f419c946be0f\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5" Oct 02 14:35:29 crc kubenswrapper[4717]: I1002 14:35:29.980787 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkrsg\" (UniqueName: \"kubernetes.io/projected/4848f26f-88d7-4cf6-b271-f419c946be0f-kube-api-access-zkrsg\") pod \"rabbitmq-cluster-operator-779fc9694b-t85n5\" (UID: \"4848f26f-88d7-4cf6-b271-f419c946be0f\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5" Oct 02 14:35:30 crc kubenswrapper[4717]: I1002 14:35:30.059803 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5" Oct 02 14:35:30 crc kubenswrapper[4717]: I1002 14:35:30.264727 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:35:30 crc kubenswrapper[4717]: I1002 14:35:30.442726 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5"] Oct 02 14:35:31 crc kubenswrapper[4717]: I1002 14:35:31.238199 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5" event={"ID":"4848f26f-88d7-4cf6-b271-f419c946be0f","Type":"ContainerStarted","Data":"ed7ff2f1d554d9607e9dd9c715fbefa90bdc758750c4595c2edeb3989e3ce183"} Oct 02 14:35:32 crc kubenswrapper[4717]: I1002 14:35:32.246590 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5" event={"ID":"4848f26f-88d7-4cf6-b271-f419c946be0f","Type":"ContainerStarted","Data":"d1eb4b31f4823e380cf791cc8052ef4fb741fd14b432652d66fa4acc5eb32ee5"} Oct 02 14:35:32 crc kubenswrapper[4717]: I1002 14:35:32.263973 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-t85n5" podStartSLOduration=1.8935755159999998 podStartE2EDuration="3.2637967s" podCreationTimestamp="2025-10-02 14:35:29 +0000 UTC" firstStartedPulling="2025-10-02 14:35:30.450158484 +0000 UTC m=+881.302012930" lastFinishedPulling="2025-10-02 14:35:31.820379668 +0000 UTC m=+882.672234114" observedRunningTime="2025-10-02 14:35:32.262256618 +0000 UTC m=+883.114111074" watchObservedRunningTime="2025-10-02 14:35:32.2637967 +0000 UTC m=+883.115651146" Oct 02 14:35:32 crc kubenswrapper[4717]: I1002 14:35:32.898725 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:35:32 crc kubenswrapper[4717]: I1002 14:35:32.940610 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6kb75"] Oct 02 14:35:32 crc kubenswrapper[4717]: I1002 14:35:32.940809 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6kb75" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerName="registry-server" containerID="cri-o://f018aab85719322d29ed1bdf3fd87767004a5c70dacfe216166fdb7f3b6a28f4" gracePeriod=2 Oct 02 14:35:32 crc kubenswrapper[4717]: I1002 14:35:32.950630 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-1" Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.254228 4717 generic.go:334] "Generic (PLEG): container finished" podID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerID="f018aab85719322d29ed1bdf3fd87767004a5c70dacfe216166fdb7f3b6a28f4" exitCode=0 Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.254298 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6kb75" event={"ID":"b93ada37-fdd7-451f-9df2-3fc3c8d1764a","Type":"ContainerDied","Data":"f018aab85719322d29ed1bdf3fd87767004a5c70dacfe216166fdb7f3b6a28f4"} Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.254333 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6kb75" 
event={"ID":"b93ada37-fdd7-451f-9df2-3fc3c8d1764a","Type":"ContainerDied","Data":"57d62775fd2867b8bbfd27c560085d2891cd5940b6f50eeff1f9849a2be5375b"} Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.254346 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57d62775fd2867b8bbfd27c560085d2891cd5940b6f50eeff1f9849a2be5375b" Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.291478 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.409839 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75l9j\" (UniqueName: \"kubernetes.io/projected/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-kube-api-access-75l9j\") pod \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.410253 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-catalog-content\") pod \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.410435 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-utilities\") pod \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\" (UID: \"b93ada37-fdd7-451f-9df2-3fc3c8d1764a\") " Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.411471 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-utilities" (OuterVolumeSpecName: "utilities") pod "b93ada37-fdd7-451f-9df2-3fc3c8d1764a" (UID: "b93ada37-fdd7-451f-9df2-3fc3c8d1764a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.415836 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-kube-api-access-75l9j" (OuterVolumeSpecName: "kube-api-access-75l9j") pod "b93ada37-fdd7-451f-9df2-3fc3c8d1764a" (UID: "b93ada37-fdd7-451f-9df2-3fc3c8d1764a"). InnerVolumeSpecName "kube-api-access-75l9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.451987 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b93ada37-fdd7-451f-9df2-3fc3c8d1764a" (UID: "b93ada37-fdd7-451f-9df2-3fc3c8d1764a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.511711 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.511744 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75l9j\" (UniqueName: \"kubernetes.io/projected/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-kube-api-access-75l9j\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:33 crc kubenswrapper[4717]: I1002 14:35:33.511755 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b93ada37-fdd7-451f-9df2-3fc3c8d1764a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:34 crc kubenswrapper[4717]: I1002 14:35:34.260444 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6kb75" Oct 02 14:35:34 crc kubenswrapper[4717]: I1002 14:35:34.289998 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6kb75"] Oct 02 14:35:34 crc kubenswrapper[4717]: I1002 14:35:34.294560 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6kb75"] Oct 02 14:35:34 crc kubenswrapper[4717]: I1002 14:35:34.849182 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" path="/var/lib/kubelet/pods/b93ada37-fdd7-451f-9df2-3fc3c8d1764a/volumes" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.159200 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Oct 02 14:35:38 crc kubenswrapper[4717]: E1002 14:35:38.160113 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerName="extract-utilities" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.160130 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerName="extract-utilities" Oct 02 14:35:38 crc kubenswrapper[4717]: E1002 14:35:38.160149 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerName="extract-content" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.160156 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerName="extract-content" Oct 02 14:35:38 crc kubenswrapper[4717]: E1002 14:35:38.160172 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerName="registry-server" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.160180 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerName="registry-server" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.160298 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="b93ada37-fdd7-451f-9df2-3fc3c8d1764a" containerName="registry-server" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.161061 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.162962 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-default-user" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.164540 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-erlang-cookie" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.164776 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"rabbitmq-plugins-conf" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.164877 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"rabbitmq-server-conf" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.164976 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-server-dockercfg-lnbqs" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.176583 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.277849 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.277938 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.277962 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6c8e897f-fef0-42ff-a151-6323550dcab0-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.278031 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d8287e01-942a-4c00-a573-8e6d8fd8255f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d8287e01-942a-4c00-a573-8e6d8fd8255f\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.278061 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6c8e897f-fef0-42ff-a151-6323550dcab0-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.278095 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6c8e897f-fef0-42ff-a151-6323550dcab0-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc 
kubenswrapper[4717]: I1002 14:35:38.278176 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wbhx\" (UniqueName: \"kubernetes.io/projected/6c8e897f-fef0-42ff-a151-6323550dcab0-kube-api-access-5wbhx\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.278240 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.379500 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wbhx\" (UniqueName: \"kubernetes.io/projected/6c8e897f-fef0-42ff-a151-6323550dcab0-kube-api-access-5wbhx\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.379574 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.379599 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.379621 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.379644 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6c8e897f-fef0-42ff-a151-6323550dcab0-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.379682 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d8287e01-942a-4c00-a573-8e6d8fd8255f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d8287e01-942a-4c00-a573-8e6d8fd8255f\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.379705 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6c8e897f-fef0-42ff-a151-6323550dcab0-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 
14:35:38.379742 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6c8e897f-fef0-42ff-a151-6323550dcab0-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.380029 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.380716 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6c8e897f-fef0-42ff-a151-6323550dcab0-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.381021 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.384148 4717 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.384184 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d8287e01-942a-4c00-a573-8e6d8fd8255f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d8287e01-942a-4c00-a573-8e6d8fd8255f\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5796f9d287ec361cf47a9c7c3351ed5ff912d1abdcdf10a8a5b8b36055f7e742/globalmount\"" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.385640 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6c8e897f-fef0-42ff-a151-6323550dcab0-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.387288 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6c8e897f-fef0-42ff-a151-6323550dcab0-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.389104 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6c8e897f-fef0-42ff-a151-6323550dcab0-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.403004 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wbhx\" (UniqueName: 
\"kubernetes.io/projected/6c8e897f-fef0-42ff-a151-6323550dcab0-kube-api-access-5wbhx\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.407629 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d8287e01-942a-4c00-a573-8e6d8fd8255f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d8287e01-942a-4c00-a573-8e6d8fd8255f\") pod \"rabbitmq-server-0\" (UID: \"6c8e897f-fef0-42ff-a151-6323550dcab0\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.476977 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:35:38 crc kubenswrapper[4717]: I1002 14:35:38.684611 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Oct 02 14:35:39 crc kubenswrapper[4717]: I1002 14:35:39.291294 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"6c8e897f-fef0-42ff-a151-6323550dcab0","Type":"ContainerStarted","Data":"ef77c9589cdfb86f0458fd85ea771cc1c5d796913f4a3c15920136d4d92f47ac"} Oct 02 14:35:40 crc kubenswrapper[4717]: I1002 14:35:40.543401 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-8shh4"] Oct 02 14:35:40 crc kubenswrapper[4717]: I1002 14:35:40.544652 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:40 crc kubenswrapper[4717]: I1002 14:35:40.552224 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-8shh4"] Oct 02 14:35:40 crc kubenswrapper[4717]: I1002 14:35:40.553620 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-7cg54" Oct 02 14:35:40 crc kubenswrapper[4717]: I1002 14:35:40.608538 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4fgt\" (UniqueName: \"kubernetes.io/projected/b03c8168-fa0b-4435-8216-08f4f71d90a7-kube-api-access-w4fgt\") pod \"keystone-operator-index-8shh4\" (UID: \"b03c8168-fa0b-4435-8216-08f4f71d90a7\") " pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:40 crc kubenswrapper[4717]: I1002 14:35:40.709813 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4fgt\" (UniqueName: \"kubernetes.io/projected/b03c8168-fa0b-4435-8216-08f4f71d90a7-kube-api-access-w4fgt\") pod \"keystone-operator-index-8shh4\" (UID: \"b03c8168-fa0b-4435-8216-08f4f71d90a7\") " pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:40 crc kubenswrapper[4717]: I1002 14:35:40.728195 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4fgt\" (UniqueName: \"kubernetes.io/projected/b03c8168-fa0b-4435-8216-08f4f71d90a7-kube-api-access-w4fgt\") pod \"keystone-operator-index-8shh4\" (UID: \"b03c8168-fa0b-4435-8216-08f4f71d90a7\") " pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:40 crc kubenswrapper[4717]: I1002 14:35:40.877195 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:42 crc kubenswrapper[4717]: I1002 14:35:42.668128 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-8shh4"] Oct 02 14:35:43 crc kubenswrapper[4717]: I1002 14:35:43.315984 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-8shh4" event={"ID":"b03c8168-fa0b-4435-8216-08f4f71d90a7","Type":"ContainerStarted","Data":"7e6d620f61611f6d9aa018fadc19963c41dd366c323e275fc3cf441f4373b8c9"} Oct 02 14:35:44 crc kubenswrapper[4717]: I1002 14:35:44.323492 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-8shh4" event={"ID":"b03c8168-fa0b-4435-8216-08f4f71d90a7","Type":"ContainerStarted","Data":"2a4d5cbaec939d07da92be142bd2505ee180c2c2f7eeb7a23e598377d5d6d4d4"} Oct 02 14:35:44 crc kubenswrapper[4717]: I1002 14:35:44.336976 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-8shh4" podStartSLOduration=3.462371298 podStartE2EDuration="4.336959631s" podCreationTimestamp="2025-10-02 14:35:40 +0000 UTC" firstStartedPulling="2025-10-02 14:35:43.113858334 +0000 UTC m=+893.965712780" lastFinishedPulling="2025-10-02 14:35:43.988446667 +0000 UTC m=+894.840301113" observedRunningTime="2025-10-02 14:35:44.333632312 +0000 UTC m=+895.185486758" watchObservedRunningTime="2025-10-02 14:35:44.336959631 +0000 UTC m=+895.188814077" Oct 02 14:35:45 crc kubenswrapper[4717]: I1002 14:35:45.330099 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"6c8e897f-fef0-42ff-a151-6323550dcab0","Type":"ContainerStarted","Data":"c51007efe0b7cf37bebf82b915983863334c460510a7291dfaf5ef38e50a30a3"} Oct 02 14:35:48 crc kubenswrapper[4717]: I1002 14:35:48.619861 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:35:48 crc kubenswrapper[4717]: I1002 14:35:48.620252 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:35:50 crc kubenswrapper[4717]: I1002 14:35:50.877986 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:50 crc kubenswrapper[4717]: I1002 14:35:50.878378 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:50 crc kubenswrapper[4717]: I1002 14:35:50.905905 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:51 crc kubenswrapper[4717]: I1002 14:35:51.394233 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-8shh4" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.189352 4717 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn"] Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.191766 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.193919 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dzqss" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.196310 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn"] Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.292910 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-bundle\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.293028 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvpd8\" (UniqueName: \"kubernetes.io/projected/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-kube-api-access-kvpd8\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.293110 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-util\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.394513 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-util\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.394606 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-bundle\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.394645 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvpd8\" (UniqueName: \"kubernetes.io/projected/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-kube-api-access-kvpd8\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 
crc kubenswrapper[4717]: I1002 14:35:54.395717 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-bundle\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.395959 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-util\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.416225 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvpd8\" (UniqueName: \"kubernetes.io/projected/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-kube-api-access-kvpd8\") pod \"d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.520759 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:54 crc kubenswrapper[4717]: I1002 14:35:54.938088 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn"] Oct 02 14:35:54 crc kubenswrapper[4717]: W1002 14:35:54.944072 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13be3ae0_c85c_4ff4_937a_fdc536f9e99a.slice/crio-7c05bc0283708de40123f1d0c3970343a178fccfa0c922f5cd90ace36adf7070 WatchSource:0}: Error finding container 7c05bc0283708de40123f1d0c3970343a178fccfa0c922f5cd90ace36adf7070: Status 404 returned error can't find the container with id 7c05bc0283708de40123f1d0c3970343a178fccfa0c922f5cd90ace36adf7070 Oct 02 14:35:55 crc kubenswrapper[4717]: I1002 14:35:55.390408 4717 generic.go:334] "Generic (PLEG): container finished" podID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerID="2bb2f6de113a465b74838641beff1af363e4892422e3487f43acb3c7dc0ff4cb" exitCode=0 Oct 02 14:35:55 crc kubenswrapper[4717]: I1002 14:35:55.390449 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" event={"ID":"13be3ae0-c85c-4ff4-937a-fdc536f9e99a","Type":"ContainerDied","Data":"2bb2f6de113a465b74838641beff1af363e4892422e3487f43acb3c7dc0ff4cb"} Oct 02 14:35:55 crc kubenswrapper[4717]: I1002 14:35:55.390474 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" event={"ID":"13be3ae0-c85c-4ff4-937a-fdc536f9e99a","Type":"ContainerStarted","Data":"7c05bc0283708de40123f1d0c3970343a178fccfa0c922f5cd90ace36adf7070"} Oct 02 14:35:56 crc kubenswrapper[4717]: I1002 14:35:56.398669 4717 generic.go:334] "Generic (PLEG): container finished" podID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerID="d12f8530391704e772c9fdf8546ec5d1531ced211db8722a9b37928ffd9511d9" exitCode=0 Oct 
02 14:35:56 crc kubenswrapper[4717]: I1002 14:35:56.398787 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" event={"ID":"13be3ae0-c85c-4ff4-937a-fdc536f9e99a","Type":"ContainerDied","Data":"d12f8530391704e772c9fdf8546ec5d1531ced211db8722a9b37928ffd9511d9"} Oct 02 14:35:57 crc kubenswrapper[4717]: I1002 14:35:57.407057 4717 generic.go:334] "Generic (PLEG): container finished" podID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerID="94954bc18f297f9ad3d63e1232d8abc840b3ac592aa2bc8d2d82898bce44de28" exitCode=0 Oct 02 14:35:57 crc kubenswrapper[4717]: I1002 14:35:57.407161 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" event={"ID":"13be3ae0-c85c-4ff4-937a-fdc536f9e99a","Type":"ContainerDied","Data":"94954bc18f297f9ad3d63e1232d8abc840b3ac592aa2bc8d2d82898bce44de28"} Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.663435 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.862135 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-util\") pod \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.862213 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvpd8\" (UniqueName: \"kubernetes.io/projected/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-kube-api-access-kvpd8\") pod \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.862362 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-bundle\") pod \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\" (UID: \"13be3ae0-c85c-4ff4-937a-fdc536f9e99a\") " Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.864587 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-bundle" (OuterVolumeSpecName: "bundle") pod "13be3ae0-c85c-4ff4-937a-fdc536f9e99a" (UID: "13be3ae0-c85c-4ff4-937a-fdc536f9e99a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.872251 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-kube-api-access-kvpd8" (OuterVolumeSpecName: "kube-api-access-kvpd8") pod "13be3ae0-c85c-4ff4-937a-fdc536f9e99a" (UID: "13be3ae0-c85c-4ff4-937a-fdc536f9e99a"). InnerVolumeSpecName "kube-api-access-kvpd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.877168 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-util" (OuterVolumeSpecName: "util") pod "13be3ae0-c85c-4ff4-937a-fdc536f9e99a" (UID: "13be3ae0-c85c-4ff4-937a-fdc536f9e99a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.964418 4717 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-util\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.964528 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvpd8\" (UniqueName: \"kubernetes.io/projected/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-kube-api-access-kvpd8\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:58 crc kubenswrapper[4717]: I1002 14:35:58.964547 4717 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/13be3ae0-c85c-4ff4-937a-fdc536f9e99a-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:35:59 crc kubenswrapper[4717]: I1002 14:35:59.424845 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" event={"ID":"13be3ae0-c85c-4ff4-937a-fdc536f9e99a","Type":"ContainerDied","Data":"7c05bc0283708de40123f1d0c3970343a178fccfa0c922f5cd90ace36adf7070"} Oct 02 14:35:59 crc kubenswrapper[4717]: I1002 14:35:59.424905 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c05bc0283708de40123f1d0c3970343a178fccfa0c922f5cd90ace36adf7070" Oct 02 14:35:59 crc kubenswrapper[4717]: I1002 14:35:59.425040 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.360598 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l"] Oct 02 14:36:05 crc kubenswrapper[4717]: E1002 14:36:05.361337 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerName="util" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.361351 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerName="util" Oct 02 14:36:05 crc kubenswrapper[4717]: E1002 14:36:05.361364 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerName="pull" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.361369 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerName="pull" Oct 02 14:36:05 crc kubenswrapper[4717]: E1002 14:36:05.361382 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerName="extract" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.361387 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerName="extract" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.361485 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="13be3ae0-c85c-4ff4-937a-fdc536f9e99a" containerName="extract" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.362052 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.364686 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.365484 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-bz5g2" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.382164 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l"] Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.541716 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssbqm\" (UniqueName: \"kubernetes.io/projected/08448dc6-f974-428d-81a3-d205e812f0ee-kube-api-access-ssbqm\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.542113 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/08448dc6-f974-428d-81a3-d205e812f0ee-apiservice-cert\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.542304 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/08448dc6-f974-428d-81a3-d205e812f0ee-webhook-cert\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.643288 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/08448dc6-f974-428d-81a3-d205e812f0ee-apiservice-cert\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.643337 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/08448dc6-f974-428d-81a3-d205e812f0ee-webhook-cert\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.643373 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssbqm\" (UniqueName: \"kubernetes.io/projected/08448dc6-f974-428d-81a3-d205e812f0ee-kube-api-access-ssbqm\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.651552 4717 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/08448dc6-f974-428d-81a3-d205e812f0ee-apiservice-cert\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.652359 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/08448dc6-f974-428d-81a3-d205e812f0ee-webhook-cert\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.666518 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssbqm\" (UniqueName: \"kubernetes.io/projected/08448dc6-f974-428d-81a3-d205e812f0ee-kube-api-access-ssbqm\") pod \"keystone-operator-controller-manager-6bc9db746f-mv89l\" (UID: \"08448dc6-f974-428d-81a3-d205e812f0ee\") " pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.679716 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:05 crc kubenswrapper[4717]: I1002 14:36:05.932939 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l"] Oct 02 14:36:06 crc kubenswrapper[4717]: I1002 14:36:06.470079 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" event={"ID":"08448dc6-f974-428d-81a3-d205e812f0ee","Type":"ContainerStarted","Data":"05fd865dd999096c34775de1ba6ae3004064583df0e4b8f4004a7c2a1862ed38"} Oct 02 14:36:08 crc kubenswrapper[4717]: I1002 14:36:08.482728 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" event={"ID":"08448dc6-f974-428d-81a3-d205e812f0ee","Type":"ContainerStarted","Data":"f9f184fe7cb5af757f8a9fc435fde8dda654a7bff9496216b92b53a3007af345"} Oct 02 14:36:08 crc kubenswrapper[4717]: I1002 14:36:08.483272 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" event={"ID":"08448dc6-f974-428d-81a3-d205e812f0ee","Type":"ContainerStarted","Data":"b380d96e4a9637de376744084258e1ab30ae901b62e9efaf72581696f7fe375d"} Oct 02 14:36:08 crc kubenswrapper[4717]: I1002 14:36:08.484085 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:08 crc kubenswrapper[4717]: I1002 14:36:08.500736 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" podStartSLOduration=1.710918413 podStartE2EDuration="3.500715985s" podCreationTimestamp="2025-10-02 14:36:05 +0000 UTC" firstStartedPulling="2025-10-02 14:36:05.945959375 +0000 UTC m=+916.797813821" lastFinishedPulling="2025-10-02 14:36:07.735756947 +0000 UTC m=+918.587611393" observedRunningTime="2025-10-02 14:36:08.499232985 +0000 UTC m=+919.351087431" watchObservedRunningTime="2025-10-02 
14:36:08.500715985 +0000 UTC m=+919.352570431" Oct 02 14:36:15 crc kubenswrapper[4717]: I1002 14:36:15.684505 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-6bc9db746f-mv89l" Oct 02 14:36:16 crc kubenswrapper[4717]: I1002 14:36:16.536083 4717 generic.go:334] "Generic (PLEG): container finished" podID="6c8e897f-fef0-42ff-a151-6323550dcab0" containerID="c51007efe0b7cf37bebf82b915983863334c460510a7291dfaf5ef38e50a30a3" exitCode=0 Oct 02 14:36:16 crc kubenswrapper[4717]: I1002 14:36:16.536131 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"6c8e897f-fef0-42ff-a151-6323550dcab0","Type":"ContainerDied","Data":"c51007efe0b7cf37bebf82b915983863334c460510a7291dfaf5ef38e50a30a3"} Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.022627 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-db-create-9xhfl"] Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.024117 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-9xhfl" Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.032189 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-create-9xhfl"] Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.211351 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hjgx\" (UniqueName: \"kubernetes.io/projected/1ef07b02-7538-48f8-9e40-7117115e2917-kube-api-access-5hjgx\") pod \"keystone-db-create-9xhfl\" (UID: \"1ef07b02-7538-48f8-9e40-7117115e2917\") " pod="glance-kuttl-tests/keystone-db-create-9xhfl" Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.312680 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hjgx\" (UniqueName: \"kubernetes.io/projected/1ef07b02-7538-48f8-9e40-7117115e2917-kube-api-access-5hjgx\") pod \"keystone-db-create-9xhfl\" (UID: \"1ef07b02-7538-48f8-9e40-7117115e2917\") " pod="glance-kuttl-tests/keystone-db-create-9xhfl" Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.345805 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hjgx\" (UniqueName: \"kubernetes.io/projected/1ef07b02-7538-48f8-9e40-7117115e2917-kube-api-access-5hjgx\") pod \"keystone-db-create-9xhfl\" (UID: \"1ef07b02-7538-48f8-9e40-7117115e2917\") " pod="glance-kuttl-tests/keystone-db-create-9xhfl" Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.547952 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"6c8e897f-fef0-42ff-a151-6323550dcab0","Type":"ContainerStarted","Data":"03776427bbdc8d8bbe7db14db59df4f159fb0f46845f1ce9a7dc0fd3664a97fb"} Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.548220 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.575916 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/rabbitmq-server-0" podStartSLOduration=35.480566668 podStartE2EDuration="40.575897069s" podCreationTimestamp="2025-10-02 14:35:37 +0000 UTC" firstStartedPulling="2025-10-02 14:35:38.691877283 +0000 UTC m=+889.543731719" lastFinishedPulling="2025-10-02 14:35:43.787207654 +0000 UTC m=+894.639062120" 
observedRunningTime="2025-10-02 14:36:17.572342773 +0000 UTC m=+928.424197219" watchObservedRunningTime="2025-10-02 14:36:17.575897069 +0000 UTC m=+928.427751515" Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.640998 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-9xhfl" Oct 02 14:36:17 crc kubenswrapper[4717]: I1002 14:36:17.908439 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-create-9xhfl"] Oct 02 14:36:17 crc kubenswrapper[4717]: W1002 14:36:17.916555 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ef07b02_7538_48f8_9e40_7117115e2917.slice/crio-fe82cc234c51dbdcc8a811408e29062b1ecb1fa5e15a7a96083b2e9ae53a1787 WatchSource:0}: Error finding container fe82cc234c51dbdcc8a811408e29062b1ecb1fa5e15a7a96083b2e9ae53a1787: Status 404 returned error can't find the container with id fe82cc234c51dbdcc8a811408e29062b1ecb1fa5e15a7a96083b2e9ae53a1787 Oct 02 14:36:18 crc kubenswrapper[4717]: I1002 14:36:18.555173 4717 generic.go:334] "Generic (PLEG): container finished" podID="1ef07b02-7538-48f8-9e40-7117115e2917" containerID="dabe52373d5434486d68aca21f722be9dfeb38ee7efb8fd4f9b6b3fc2b7a523d" exitCode=0 Oct 02 14:36:18 crc kubenswrapper[4717]: I1002 14:36:18.555299 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-9xhfl" event={"ID":"1ef07b02-7538-48f8-9e40-7117115e2917","Type":"ContainerDied","Data":"dabe52373d5434486d68aca21f722be9dfeb38ee7efb8fd4f9b6b3fc2b7a523d"} Oct 02 14:36:18 crc kubenswrapper[4717]: I1002 14:36:18.555368 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-9xhfl" event={"ID":"1ef07b02-7538-48f8-9e40-7117115e2917","Type":"ContainerStarted","Data":"fe82cc234c51dbdcc8a811408e29062b1ecb1fa5e15a7a96083b2e9ae53a1787"} Oct 02 14:36:18 crc kubenswrapper[4717]: I1002 14:36:18.619824 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:36:18 crc kubenswrapper[4717]: I1002 14:36:18.619879 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.741693 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-dcw5b"] Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.742661 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-dcw5b" Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.744548 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-index-dockercfg-pkwvx" Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.751180 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-dcw5b"] Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.821485 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-9xhfl" Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.844991 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdzhv\" (UniqueName: \"kubernetes.io/projected/cc9307e4-0c53-446c-a89d-62069ccc7966-kube-api-access-rdzhv\") pod \"horizon-operator-index-dcw5b\" (UID: \"cc9307e4-0c53-446c-a89d-62069ccc7966\") " pod="openstack-operators/horizon-operator-index-dcw5b" Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.946546 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hjgx\" (UniqueName: \"kubernetes.io/projected/1ef07b02-7538-48f8-9e40-7117115e2917-kube-api-access-5hjgx\") pod \"1ef07b02-7538-48f8-9e40-7117115e2917\" (UID: \"1ef07b02-7538-48f8-9e40-7117115e2917\") " Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.946798 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdzhv\" (UniqueName: \"kubernetes.io/projected/cc9307e4-0c53-446c-a89d-62069ccc7966-kube-api-access-rdzhv\") pod \"horizon-operator-index-dcw5b\" (UID: \"cc9307e4-0c53-446c-a89d-62069ccc7966\") " pod="openstack-operators/horizon-operator-index-dcw5b" Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.951894 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ef07b02-7538-48f8-9e40-7117115e2917-kube-api-access-5hjgx" (OuterVolumeSpecName: "kube-api-access-5hjgx") pod "1ef07b02-7538-48f8-9e40-7117115e2917" (UID: "1ef07b02-7538-48f8-9e40-7117115e2917"). InnerVolumeSpecName "kube-api-access-5hjgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:36:19 crc kubenswrapper[4717]: I1002 14:36:19.964544 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdzhv\" (UniqueName: \"kubernetes.io/projected/cc9307e4-0c53-446c-a89d-62069ccc7966-kube-api-access-rdzhv\") pod \"horizon-operator-index-dcw5b\" (UID: \"cc9307e4-0c53-446c-a89d-62069ccc7966\") " pod="openstack-operators/horizon-operator-index-dcw5b" Oct 02 14:36:20 crc kubenswrapper[4717]: I1002 14:36:20.048614 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hjgx\" (UniqueName: \"kubernetes.io/projected/1ef07b02-7538-48f8-9e40-7117115e2917-kube-api-access-5hjgx\") on node \"crc\" DevicePath \"\"" Oct 02 14:36:20 crc kubenswrapper[4717]: I1002 14:36:20.059653 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-dcw5b" Oct 02 14:36:20 crc kubenswrapper[4717]: I1002 14:36:20.255026 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-dcw5b"] Oct 02 14:36:20 crc kubenswrapper[4717]: W1002 14:36:20.262216 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc9307e4_0c53_446c_a89d_62069ccc7966.slice/crio-61fe8baac34057e4983ab953e181110461520474bfce3f416c368d2a1655e416 WatchSource:0}: Error finding container 61fe8baac34057e4983ab953e181110461520474bfce3f416c368d2a1655e416: Status 404 returned error can't find the container with id 61fe8baac34057e4983ab953e181110461520474bfce3f416c368d2a1655e416 Oct 02 14:36:20 crc kubenswrapper[4717]: I1002 14:36:20.571676 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-dcw5b" event={"ID":"cc9307e4-0c53-446c-a89d-62069ccc7966","Type":"ContainerStarted","Data":"61fe8baac34057e4983ab953e181110461520474bfce3f416c368d2a1655e416"} Oct 02 14:36:20 crc kubenswrapper[4717]: I1002 14:36:20.574221 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-9xhfl" event={"ID":"1ef07b02-7538-48f8-9e40-7117115e2917","Type":"ContainerDied","Data":"fe82cc234c51dbdcc8a811408e29062b1ecb1fa5e15a7a96083b2e9ae53a1787"} Oct 02 14:36:20 crc kubenswrapper[4717]: I1002 14:36:20.574269 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-9xhfl" Oct 02 14:36:20 crc kubenswrapper[4717]: I1002 14:36:20.574275 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe82cc234c51dbdcc8a811408e29062b1ecb1fa5e15a7a96083b2e9ae53a1787" Oct 02 14:36:21 crc kubenswrapper[4717]: I1002 14:36:21.582899 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-dcw5b" event={"ID":"cc9307e4-0c53-446c-a89d-62069ccc7966","Type":"ContainerStarted","Data":"4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3"} Oct 02 14:36:21 crc kubenswrapper[4717]: I1002 14:36:21.599328 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-index-dcw5b" podStartSLOduration=1.584486664 podStartE2EDuration="2.599306408s" podCreationTimestamp="2025-10-02 14:36:19 +0000 UTC" firstStartedPulling="2025-10-02 14:36:20.264001435 +0000 UTC m=+931.115855881" lastFinishedPulling="2025-10-02 14:36:21.278821179 +0000 UTC m=+932.130675625" observedRunningTime="2025-10-02 14:36:21.596491212 +0000 UTC m=+932.448345758" watchObservedRunningTime="2025-10-02 14:36:21.599306408 +0000 UTC m=+932.451160854" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.543800 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-8phkn"] Oct 02 14:36:23 crc kubenswrapper[4717]: E1002 14:36:23.544329 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ef07b02-7538-48f8-9e40-7117115e2917" containerName="mariadb-database-create" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.544342 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ef07b02-7538-48f8-9e40-7117115e2917" containerName="mariadb-database-create" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.544445 4717 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1ef07b02-7538-48f8-9e40-7117115e2917" containerName="mariadb-database-create" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.544852 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-8phkn" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.546847 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-index-dockercfg-fc4ck" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.563857 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-8phkn"] Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.696045 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqg6w\" (UniqueName: \"kubernetes.io/projected/be06069c-5aaf-4de2-b611-e56aa40bf9dd-kube-api-access-gqg6w\") pod \"swift-operator-index-8phkn\" (UID: \"be06069c-5aaf-4de2-b611-e56aa40bf9dd\") " pod="openstack-operators/swift-operator-index-8phkn" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.796885 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqg6w\" (UniqueName: \"kubernetes.io/projected/be06069c-5aaf-4de2-b611-e56aa40bf9dd-kube-api-access-gqg6w\") pod \"swift-operator-index-8phkn\" (UID: \"be06069c-5aaf-4de2-b611-e56aa40bf9dd\") " pod="openstack-operators/swift-operator-index-8phkn" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.820076 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqg6w\" (UniqueName: \"kubernetes.io/projected/be06069c-5aaf-4de2-b611-e56aa40bf9dd-kube-api-access-gqg6w\") pod \"swift-operator-index-8phkn\" (UID: \"be06069c-5aaf-4de2-b611-e56aa40bf9dd\") " pod="openstack-operators/swift-operator-index-8phkn" Oct 02 14:36:23 crc kubenswrapper[4717]: I1002 14:36:23.862608 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-8phkn" Oct 02 14:36:24 crc kubenswrapper[4717]: I1002 14:36:24.282751 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-8phkn"] Oct 02 14:36:24 crc kubenswrapper[4717]: I1002 14:36:24.601167 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-8phkn" event={"ID":"be06069c-5aaf-4de2-b611-e56aa40bf9dd","Type":"ContainerStarted","Data":"3e14590dc9ca8160f2f6e1932e83cf4908c25f7a28cc69576b559c3c1fa6b079"} Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.142321 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-dcw5b"] Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.142541 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/horizon-operator-index-dcw5b" podUID="cc9307e4-0c53-446c-a89d-62069ccc7966" containerName="registry-server" containerID="cri-o://4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3" gracePeriod=2 Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.527731 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-dcw5b" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.609136 4717 generic.go:334] "Generic (PLEG): container finished" podID="cc9307e4-0c53-446c-a89d-62069ccc7966" containerID="4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3" exitCode=0 Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.609183 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-dcw5b" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.609187 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-dcw5b" event={"ID":"cc9307e4-0c53-446c-a89d-62069ccc7966","Type":"ContainerDied","Data":"4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3"} Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.609216 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-dcw5b" event={"ID":"cc9307e4-0c53-446c-a89d-62069ccc7966","Type":"ContainerDied","Data":"61fe8baac34057e4983ab953e181110461520474bfce3f416c368d2a1655e416"} Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.609238 4717 scope.go:117] "RemoveContainer" containerID="4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.621200 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdzhv\" (UniqueName: \"kubernetes.io/projected/cc9307e4-0c53-446c-a89d-62069ccc7966-kube-api-access-rdzhv\") pod \"cc9307e4-0c53-446c-a89d-62069ccc7966\" (UID: \"cc9307e4-0c53-446c-a89d-62069ccc7966\") " Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.623515 4717 scope.go:117] "RemoveContainer" containerID="4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3" Oct 02 14:36:25 crc kubenswrapper[4717]: E1002 14:36:25.624512 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3\": container with ID starting with 4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3 not found: ID does not exist" containerID="4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.624577 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3"} err="failed to get container status \"4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3\": rpc error: code = NotFound desc = could not find container \"4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3\": container with ID starting with 4ce64de1a3226ba9d5648b43d0d65efd35cc6e1689a983997a3857e45d97ecb3 not found: ID does not exist" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.626068 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc9307e4-0c53-446c-a89d-62069ccc7966-kube-api-access-rdzhv" (OuterVolumeSpecName: "kube-api-access-rdzhv") pod "cc9307e4-0c53-446c-a89d-62069ccc7966" (UID: "cc9307e4-0c53-446c-a89d-62069ccc7966"). InnerVolumeSpecName "kube-api-access-rdzhv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.722504 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdzhv\" (UniqueName: \"kubernetes.io/projected/cc9307e4-0c53-446c-a89d-62069ccc7966-kube-api-access-rdzhv\") on node \"crc\" DevicePath \"\"" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.741773 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-br82z"] Oct 02 14:36:25 crc kubenswrapper[4717]: E1002 14:36:25.742083 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc9307e4-0c53-446c-a89d-62069ccc7966" containerName="registry-server" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.742094 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc9307e4-0c53-446c-a89d-62069ccc7966" containerName="registry-server" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.742197 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc9307e4-0c53-446c-a89d-62069ccc7966" containerName="registry-server" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.742614 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.750986 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-br82z"] Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.824136 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jvwh\" (UniqueName: \"kubernetes.io/projected/08984fe8-ca53-44ac-8958-0ea63894ff61-kube-api-access-8jvwh\") pod \"horizon-operator-index-br82z\" (UID: \"08984fe8-ca53-44ac-8958-0ea63894ff61\") " pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.925134 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jvwh\" (UniqueName: \"kubernetes.io/projected/08984fe8-ca53-44ac-8958-0ea63894ff61-kube-api-access-8jvwh\") pod \"horizon-operator-index-br82z\" (UID: \"08984fe8-ca53-44ac-8958-0ea63894ff61\") " pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.935239 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-dcw5b"] Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.937868 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/horizon-operator-index-dcw5b"] Oct 02 14:36:25 crc kubenswrapper[4717]: I1002 14:36:25.944222 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jvwh\" (UniqueName: \"kubernetes.io/projected/08984fe8-ca53-44ac-8958-0ea63894ff61-kube-api-access-8jvwh\") pod \"horizon-operator-index-br82z\" (UID: \"08984fe8-ca53-44ac-8958-0ea63894ff61\") " pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:26 crc kubenswrapper[4717]: I1002 14:36:26.058881 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:26 crc kubenswrapper[4717]: I1002 14:36:26.452046 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-br82z"] Oct 02 14:36:26 crc kubenswrapper[4717]: I1002 14:36:26.845880 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc9307e4-0c53-446c-a89d-62069ccc7966" path="/var/lib/kubelet/pods/cc9307e4-0c53-446c-a89d-62069ccc7966/volumes" Oct 02 14:36:26 crc kubenswrapper[4717]: I1002 14:36:26.904497 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-a1a1-account-create-wq2qg"] Oct 02 14:36:26 crc kubenswrapper[4717]: I1002 14:36:26.906183 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" Oct 02 14:36:26 crc kubenswrapper[4717]: I1002 14:36:26.912632 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-db-secret" Oct 02 14:36:26 crc kubenswrapper[4717]: I1002 14:36:26.919841 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-a1a1-account-create-wq2qg"] Oct 02 14:36:27 crc kubenswrapper[4717]: I1002 14:36:27.048783 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gmfs\" (UniqueName: \"kubernetes.io/projected/5005cfbf-4a42-4330-a56d-6ff510cf500b-kube-api-access-5gmfs\") pod \"keystone-a1a1-account-create-wq2qg\" (UID: \"5005cfbf-4a42-4330-a56d-6ff510cf500b\") " pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" Oct 02 14:36:27 crc kubenswrapper[4717]: I1002 14:36:27.150662 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gmfs\" (UniqueName: \"kubernetes.io/projected/5005cfbf-4a42-4330-a56d-6ff510cf500b-kube-api-access-5gmfs\") pod \"keystone-a1a1-account-create-wq2qg\" (UID: \"5005cfbf-4a42-4330-a56d-6ff510cf500b\") " pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" Oct 02 14:36:27 crc kubenswrapper[4717]: I1002 14:36:27.176423 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gmfs\" (UniqueName: \"kubernetes.io/projected/5005cfbf-4a42-4330-a56d-6ff510cf500b-kube-api-access-5gmfs\") pod \"keystone-a1a1-account-create-wq2qg\" (UID: \"5005cfbf-4a42-4330-a56d-6ff510cf500b\") " pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" Oct 02 14:36:27 crc kubenswrapper[4717]: I1002 14:36:27.232421 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" Oct 02 14:36:28 crc kubenswrapper[4717]: I1002 14:36:28.481206 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 02 14:36:30 crc kubenswrapper[4717]: I1002 14:36:30.339524 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-8phkn"] Oct 02 14:36:31 crc kubenswrapper[4717]: I1002 14:36:31.150108 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-w94s9"] Oct 02 14:36:31 crc kubenswrapper[4717]: I1002 14:36:31.151755 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:31 crc kubenswrapper[4717]: I1002 14:36:31.166411 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-w94s9"] Oct 02 14:36:31 crc kubenswrapper[4717]: I1002 14:36:31.310214 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgzfp\" (UniqueName: \"kubernetes.io/projected/60443082-f42b-4c42-b976-eaccbfeaabfc-kube-api-access-pgzfp\") pod \"swift-operator-index-w94s9\" (UID: \"60443082-f42b-4c42-b976-eaccbfeaabfc\") " pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:31 crc kubenswrapper[4717]: I1002 14:36:31.422637 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgzfp\" (UniqueName: \"kubernetes.io/projected/60443082-f42b-4c42-b976-eaccbfeaabfc-kube-api-access-pgzfp\") pod \"swift-operator-index-w94s9\" (UID: \"60443082-f42b-4c42-b976-eaccbfeaabfc\") " pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:31 crc kubenswrapper[4717]: I1002 14:36:31.443660 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgzfp\" (UniqueName: \"kubernetes.io/projected/60443082-f42b-4c42-b976-eaccbfeaabfc-kube-api-access-pgzfp\") pod \"swift-operator-index-w94s9\" (UID: \"60443082-f42b-4c42-b976-eaccbfeaabfc\") " pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:31 crc kubenswrapper[4717]: I1002 14:36:31.486673 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:37 crc kubenswrapper[4717]: I1002 14:36:37.703342 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-br82z" event={"ID":"08984fe8-ca53-44ac-8958-0ea63894ff61","Type":"ContainerStarted","Data":"aeeed48c94636ddf229077df6fea76c994c382770979b033c725ac795ae12f0e"} Oct 02 14:36:38 crc kubenswrapper[4717]: I1002 14:36:38.555312 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-w94s9"] Oct 02 14:36:38 crc kubenswrapper[4717]: I1002 14:36:38.682103 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-a1a1-account-create-wq2qg"] Oct 02 14:36:38 crc kubenswrapper[4717]: W1002 14:36:38.687095 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5005cfbf_4a42_4330_a56d_6ff510cf500b.slice/crio-01ac23545424418509b2c49749e3f88a3c58f7d2f75752fbc67e2222214d8723 WatchSource:0}: Error finding container 01ac23545424418509b2c49749e3f88a3c58f7d2f75752fbc67e2222214d8723: Status 404 returned error can't find the container with id 01ac23545424418509b2c49749e3f88a3c58f7d2f75752fbc67e2222214d8723 Oct 02 14:36:38 crc kubenswrapper[4717]: I1002 14:36:38.709290 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" event={"ID":"5005cfbf-4a42-4330-a56d-6ff510cf500b","Type":"ContainerStarted","Data":"01ac23545424418509b2c49749e3f88a3c58f7d2f75752fbc67e2222214d8723"} Oct 02 14:36:38 crc kubenswrapper[4717]: I1002 14:36:38.710233 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-w94s9" event={"ID":"60443082-f42b-4c42-b976-eaccbfeaabfc","Type":"ContainerStarted","Data":"d97613974d5f1c0f86c4637b5717f2469af544cb2aa83ef7e4fefaf260cfdde6"} Oct 
02 14:36:38 crc kubenswrapper[4717]: E1002 14:36:38.783530 4717 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator-index:latest" Oct 02 14:36:38 crc kubenswrapper[4717]: E1002 14:36:38.784095 4717 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:registry-server,Image:quay.io/openstack-k8s-operators/swift-operator-index:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:grpc,HostPort:0,ContainerPort:50051,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gqg6w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[grpc_health_probe -addr=:50051],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-index-8phkn_openstack-operators(be06069c-5aaf-4de2-b611-e56aa40bf9dd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 02 14:36:38 crc kubenswrapper[4717]: E1002 14:36:38.785776 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"registry-server\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-index-8phkn" podUID="be06069c-5aaf-4de2-b611-e56aa40bf9dd" Oct 02 14:36:39 crc kubenswrapper[4717]: I1002 14:36:39.717182 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" event={"ID":"5005cfbf-4a42-4330-a56d-6ff510cf500b","Type":"ContainerStarted","Data":"72d3d1944d761478c90857d7e01384d6d42869bcc1349cb788d5555467f24bbf"} Oct 02 14:36:39 crc kubenswrapper[4717]: I1002 14:36:39.731118 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" podStartSLOduration=13.731099914 
podStartE2EDuration="13.731099914s" podCreationTimestamp="2025-10-02 14:36:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:36:39.730232262 +0000 UTC m=+950.582086708" watchObservedRunningTime="2025-10-02 14:36:39.731099914 +0000 UTC m=+950.582954360" Oct 02 14:36:39 crc kubenswrapper[4717]: I1002 14:36:39.977397 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-index-8phkn" Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.151016 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqg6w\" (UniqueName: \"kubernetes.io/projected/be06069c-5aaf-4de2-b611-e56aa40bf9dd-kube-api-access-gqg6w\") pod \"be06069c-5aaf-4de2-b611-e56aa40bf9dd\" (UID: \"be06069c-5aaf-4de2-b611-e56aa40bf9dd\") " Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.158277 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be06069c-5aaf-4de2-b611-e56aa40bf9dd-kube-api-access-gqg6w" (OuterVolumeSpecName: "kube-api-access-gqg6w") pod "be06069c-5aaf-4de2-b611-e56aa40bf9dd" (UID: "be06069c-5aaf-4de2-b611-e56aa40bf9dd"). InnerVolumeSpecName "kube-api-access-gqg6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.252807 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqg6w\" (UniqueName: \"kubernetes.io/projected/be06069c-5aaf-4de2-b611-e56aa40bf9dd-kube-api-access-gqg6w\") on node \"crc\" DevicePath \"\"" Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.724017 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-br82z" event={"ID":"08984fe8-ca53-44ac-8958-0ea63894ff61","Type":"ContainerStarted","Data":"5ca1d4534135bd74b4093c6e71933b57c9f244ae67cfc4914c28835eda1d141b"} Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.725358 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-8phkn" event={"ID":"be06069c-5aaf-4de2-b611-e56aa40bf9dd","Type":"ContainerDied","Data":"3e14590dc9ca8160f2f6e1932e83cf4908c25f7a28cc69576b559c3c1fa6b079"} Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.725377 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-8phkn" Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.726417 4717 generic.go:334] "Generic (PLEG): container finished" podID="5005cfbf-4a42-4330-a56d-6ff510cf500b" containerID="72d3d1944d761478c90857d7e01384d6d42869bcc1349cb788d5555467f24bbf" exitCode=0 Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.726441 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" event={"ID":"5005cfbf-4a42-4330-a56d-6ff510cf500b","Type":"ContainerDied","Data":"72d3d1944d761478c90857d7e01384d6d42869bcc1349cb788d5555467f24bbf"} Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.742635 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-index-br82z" podStartSLOduration=13.405418222 podStartE2EDuration="15.742613959s" podCreationTimestamp="2025-10-02 14:36:25 +0000 UTC" firstStartedPulling="2025-10-02 14:36:38.105819957 +0000 UTC m=+948.957674413" lastFinishedPulling="2025-10-02 14:36:40.443015704 +0000 UTC m=+951.294870150" observedRunningTime="2025-10-02 14:36:40.741493519 +0000 UTC m=+951.593347965" watchObservedRunningTime="2025-10-02 14:36:40.742613959 +0000 UTC m=+951.594468435" Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.798126 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-index-8phkn"] Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.798199 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-index-8phkn"] Oct 02 14:36:40 crc kubenswrapper[4717]: I1002 14:36:40.847080 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be06069c-5aaf-4de2-b611-e56aa40bf9dd" path="/var/lib/kubelet/pods/be06069c-5aaf-4de2-b611-e56aa40bf9dd/volumes" Oct 02 14:36:41 crc kubenswrapper[4717]: I1002 14:36:41.734389 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-w94s9" event={"ID":"60443082-f42b-4c42-b976-eaccbfeaabfc","Type":"ContainerStarted","Data":"f3fad76a3d24035609fcfb8e05d299062058cab283a2d35d66b2e928e88fc6e5"} Oct 02 14:36:41 crc kubenswrapper[4717]: I1002 14:36:41.756603 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-w94s9" podStartSLOduration=8.639244218 podStartE2EDuration="10.75657299s" podCreationTimestamp="2025-10-02 14:36:31 +0000 UTC" firstStartedPulling="2025-10-02 14:36:38.564596722 +0000 UTC m=+949.416451168" lastFinishedPulling="2025-10-02 14:36:40.681925494 +0000 UTC m=+951.533779940" observedRunningTime="2025-10-02 14:36:41.747455554 +0000 UTC m=+952.599310020" watchObservedRunningTime="2025-10-02 14:36:41.75657299 +0000 UTC m=+952.608427466" Oct 02 14:36:42 crc kubenswrapper[4717]: I1002 14:36:42.019640 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" Oct 02 14:36:42 crc kubenswrapper[4717]: I1002 14:36:42.180426 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gmfs\" (UniqueName: \"kubernetes.io/projected/5005cfbf-4a42-4330-a56d-6ff510cf500b-kube-api-access-5gmfs\") pod \"5005cfbf-4a42-4330-a56d-6ff510cf500b\" (UID: \"5005cfbf-4a42-4330-a56d-6ff510cf500b\") " Oct 02 14:36:42 crc kubenswrapper[4717]: I1002 14:36:42.193336 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5005cfbf-4a42-4330-a56d-6ff510cf500b-kube-api-access-5gmfs" (OuterVolumeSpecName: "kube-api-access-5gmfs") pod "5005cfbf-4a42-4330-a56d-6ff510cf500b" (UID: "5005cfbf-4a42-4330-a56d-6ff510cf500b"). InnerVolumeSpecName "kube-api-access-5gmfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:36:42 crc kubenswrapper[4717]: I1002 14:36:42.281716 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gmfs\" (UniqueName: \"kubernetes.io/projected/5005cfbf-4a42-4330-a56d-6ff510cf500b-kube-api-access-5gmfs\") on node \"crc\" DevicePath \"\"" Oct 02 14:36:42 crc kubenswrapper[4717]: I1002 14:36:42.746368 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" Oct 02 14:36:42 crc kubenswrapper[4717]: I1002 14:36:42.746477 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-a1a1-account-create-wq2qg" event={"ID":"5005cfbf-4a42-4330-a56d-6ff510cf500b","Type":"ContainerDied","Data":"01ac23545424418509b2c49749e3f88a3c58f7d2f75752fbc67e2222214d8723"} Oct 02 14:36:42 crc kubenswrapper[4717]: I1002 14:36:42.746515 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01ac23545424418509b2c49749e3f88a3c58f7d2f75752fbc67e2222214d8723" Oct 02 14:36:46 crc kubenswrapper[4717]: I1002 14:36:46.059530 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:46 crc kubenswrapper[4717]: I1002 14:36:46.059911 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:46 crc kubenswrapper[4717]: I1002 14:36:46.084875 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:46 crc kubenswrapper[4717]: I1002 14:36:46.795567 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-index-br82z" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.469639 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-db-sync-6glqr"] Oct 02 14:36:47 crc kubenswrapper[4717]: E1002 14:36:47.470599 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5005cfbf-4a42-4330-a56d-6ff510cf500b" containerName="mariadb-account-create" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.470693 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5005cfbf-4a42-4330-a56d-6ff510cf500b" containerName="mariadb-account-create" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.470863 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="5005cfbf-4a42-4330-a56d-6ff510cf500b" containerName="mariadb-account-create" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 
14:36:47.471354 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.473561 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.474845 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-n7b9s" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.475092 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.475849 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.476559 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-6glqr"] Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.652738 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86d48c9-74ed-4705-8990-075e751746a6-config-data\") pod \"keystone-db-sync-6glqr\" (UID: \"b86d48c9-74ed-4705-8990-075e751746a6\") " pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.652835 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tk7t\" (UniqueName: \"kubernetes.io/projected/b86d48c9-74ed-4705-8990-075e751746a6-kube-api-access-4tk7t\") pod \"keystone-db-sync-6glqr\" (UID: \"b86d48c9-74ed-4705-8990-075e751746a6\") " pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.753858 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tk7t\" (UniqueName: \"kubernetes.io/projected/b86d48c9-74ed-4705-8990-075e751746a6-kube-api-access-4tk7t\") pod \"keystone-db-sync-6glqr\" (UID: \"b86d48c9-74ed-4705-8990-075e751746a6\") " pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.754065 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86d48c9-74ed-4705-8990-075e751746a6-config-data\") pod \"keystone-db-sync-6glqr\" (UID: \"b86d48c9-74ed-4705-8990-075e751746a6\") " pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.767520 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86d48c9-74ed-4705-8990-075e751746a6-config-data\") pod \"keystone-db-sync-6glqr\" (UID: \"b86d48c9-74ed-4705-8990-075e751746a6\") " pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.777592 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tk7t\" (UniqueName: \"kubernetes.io/projected/b86d48c9-74ed-4705-8990-075e751746a6-kube-api-access-4tk7t\") pod \"keystone-db-sync-6glqr\" (UID: \"b86d48c9-74ed-4705-8990-075e751746a6\") " pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:36:47 crc kubenswrapper[4717]: I1002 14:36:47.785865 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.187489 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-6glqr"] Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.194055 4717 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.620195 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.620254 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.620303 4717 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.620892 4717 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dc26c4013ef3dfdebc8448f602fd80cfb07a36d1bd8014fcfc1f4769626fbe3f"} pod="openshift-machine-config-operator/machine-config-daemon-sk55f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.620962 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" containerID="cri-o://dc26c4013ef3dfdebc8448f602fd80cfb07a36d1bd8014fcfc1f4769626fbe3f" gracePeriod=600 Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.786087 4717 generic.go:334] "Generic (PLEG): container finished" podID="405aba30-0ff3-4fca-a5da-09c35263665d" containerID="dc26c4013ef3dfdebc8448f602fd80cfb07a36d1bd8014fcfc1f4769626fbe3f" exitCode=0 Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.786163 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerDied","Data":"dc26c4013ef3dfdebc8448f602fd80cfb07a36d1bd8014fcfc1f4769626fbe3f"} Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.786219 4717 scope.go:117] "RemoveContainer" containerID="70c25bc08fc344bcf8bb59d3376cd774b978162bd5593d10e8b7b82c8502396b" Oct 02 14:36:48 crc kubenswrapper[4717]: I1002 14:36:48.787201 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-6glqr" event={"ID":"b86d48c9-74ed-4705-8990-075e751746a6","Type":"ContainerStarted","Data":"8433706f79a365f3af7a882b5ad11d0139a29fb6c4437dd7e5bb7de314482697"} Oct 02 14:36:49 crc kubenswrapper[4717]: I1002 14:36:49.795900 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" 
event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"52384e0f02272cfcc1d37cbacff5ecff9bba1bac6264b24fc5eae60641b49d30"} Oct 02 14:36:51 crc kubenswrapper[4717]: I1002 14:36:51.487396 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:51 crc kubenswrapper[4717]: I1002 14:36:51.487818 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:51 crc kubenswrapper[4717]: I1002 14:36:51.514375 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:51 crc kubenswrapper[4717]: I1002 14:36:51.836601 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-index-w94s9" Oct 02 14:36:55 crc kubenswrapper[4717]: I1002 14:36:55.833331 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-6glqr" event={"ID":"b86d48c9-74ed-4705-8990-075e751746a6","Type":"ContainerStarted","Data":"a2accff64639b592679f5a28c4d69a7ec446ca652b42cca2a0e173870fdf5189"} Oct 02 14:36:55 crc kubenswrapper[4717]: I1002 14:36:55.849231 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-db-sync-6glqr" podStartSLOduration=1.446362788 podStartE2EDuration="8.849087211s" podCreationTimestamp="2025-10-02 14:36:47 +0000 UTC" firstStartedPulling="2025-10-02 14:36:48.193738439 +0000 UTC m=+959.045592885" lastFinishedPulling="2025-10-02 14:36:55.596462862 +0000 UTC m=+966.448317308" observedRunningTime="2025-10-02 14:36:55.847816927 +0000 UTC m=+966.699671383" watchObservedRunningTime="2025-10-02 14:36:55.849087211 +0000 UTC m=+966.700941657" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.400672 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq"] Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.402179 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.406039 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dzqss" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.414374 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq"] Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.467202 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-bundle\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.467265 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-util\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.467418 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26297\" (UniqueName: \"kubernetes.io/projected/25007f37-eab2-4a19-aaec-041dccf4a1fa-kube-api-access-26297\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.568376 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-bundle\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.568441 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-util\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.568479 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26297\" (UniqueName: \"kubernetes.io/projected/25007f37-eab2-4a19-aaec-041dccf4a1fa-kube-api-access-26297\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.568886 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-bundle\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.568918 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-util\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.588734 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26297\" (UniqueName: \"kubernetes.io/projected/25007f37-eab2-4a19-aaec-041dccf4a1fa-kube-api-access-26297\") pod \"24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:56 crc kubenswrapper[4717]: I1002 14:36:56.718492 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.131756 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq"] Oct 02 14:36:57 crc kubenswrapper[4717]: W1002 14:36:57.143737 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25007f37_eab2_4a19_aaec_041dccf4a1fa.slice/crio-dc262df6fef77800c57b0f764cf495606ca3ed77a43f220b7f655dfbbee45d2c WatchSource:0}: Error finding container dc262df6fef77800c57b0f764cf495606ca3ed77a43f220b7f655dfbbee45d2c: Status 404 returned error can't find the container with id dc262df6fef77800c57b0f764cf495606ca3ed77a43f220b7f655dfbbee45d2c Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.375145 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx"] Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.376903 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.384284 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx"] Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.480664 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlxfz\" (UniqueName: \"kubernetes.io/projected/d61faefb-44b1-49a1-968c-48bf323a6c54-kube-api-access-mlxfz\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.480741 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-util\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.480792 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-bundle\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.581429 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-bundle\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.581750 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlxfz\" (UniqueName: \"kubernetes.io/projected/d61faefb-44b1-49a1-968c-48bf323a6c54-kube-api-access-mlxfz\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.582686 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-util\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.581887 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-bundle\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " 
pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.583043 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-util\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.599263 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlxfz\" (UniqueName: \"kubernetes.io/projected/d61faefb-44b1-49a1-968c-48bf323a6c54-kube-api-access-mlxfz\") pod \"c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.721603 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.852041 4717 generic.go:334] "Generic (PLEG): container finished" podID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerID="ffa0b4d6ab49fa1ab0ae24f027e2dd6c46feb84d938e0d0bb186be9ebf705534" exitCode=0 Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.852228 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" event={"ID":"25007f37-eab2-4a19-aaec-041dccf4a1fa","Type":"ContainerDied","Data":"ffa0b4d6ab49fa1ab0ae24f027e2dd6c46feb84d938e0d0bb186be9ebf705534"} Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.852746 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" event={"ID":"25007f37-eab2-4a19-aaec-041dccf4a1fa","Type":"ContainerStarted","Data":"dc262df6fef77800c57b0f764cf495606ca3ed77a43f220b7f655dfbbee45d2c"} Oct 02 14:36:57 crc kubenswrapper[4717]: I1002 14:36:57.942104 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx"] Oct 02 14:36:57 crc kubenswrapper[4717]: W1002 14:36:57.949424 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd61faefb_44b1_49a1_968c_48bf323a6c54.slice/crio-390e0de5695ab3194125bac88419ea3046b1f0bd3dd66cb43287ff1b2db96f8e WatchSource:0}: Error finding container 390e0de5695ab3194125bac88419ea3046b1f0bd3dd66cb43287ff1b2db96f8e: Status 404 returned error can't find the container with id 390e0de5695ab3194125bac88419ea3046b1f0bd3dd66cb43287ff1b2db96f8e Oct 02 14:36:58 crc kubenswrapper[4717]: I1002 14:36:58.867271 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" event={"ID":"25007f37-eab2-4a19-aaec-041dccf4a1fa","Type":"ContainerStarted","Data":"7da1645a9a1b316759d69b717101bfd639e5143f1f65242b80c9ee8dec0d1a8e"} Oct 02 14:36:58 crc kubenswrapper[4717]: I1002 14:36:58.869564 4717 generic.go:334] "Generic (PLEG): container finished" podID="d61faefb-44b1-49a1-968c-48bf323a6c54" 
containerID="15f49557c521a6688e4e9dce1c030970ca949a583fed4f9121c9f86c3e787162" exitCode=0 Oct 02 14:36:58 crc kubenswrapper[4717]: I1002 14:36:58.869606 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" event={"ID":"d61faefb-44b1-49a1-968c-48bf323a6c54","Type":"ContainerDied","Data":"15f49557c521a6688e4e9dce1c030970ca949a583fed4f9121c9f86c3e787162"} Oct 02 14:36:58 crc kubenswrapper[4717]: I1002 14:36:58.869632 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" event={"ID":"d61faefb-44b1-49a1-968c-48bf323a6c54","Type":"ContainerStarted","Data":"390e0de5695ab3194125bac88419ea3046b1f0bd3dd66cb43287ff1b2db96f8e"} Oct 02 14:36:59 crc kubenswrapper[4717]: I1002 14:36:59.877366 4717 generic.go:334] "Generic (PLEG): container finished" podID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerID="7da1645a9a1b316759d69b717101bfd639e5143f1f65242b80c9ee8dec0d1a8e" exitCode=0 Oct 02 14:36:59 crc kubenswrapper[4717]: I1002 14:36:59.877743 4717 generic.go:334] "Generic (PLEG): container finished" podID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerID="21e5b11327751827e2d1c004391b0281e9aaa5a46d052b90794528cc7aa80ac8" exitCode=0 Oct 02 14:36:59 crc kubenswrapper[4717]: I1002 14:36:59.877471 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" event={"ID":"25007f37-eab2-4a19-aaec-041dccf4a1fa","Type":"ContainerDied","Data":"7da1645a9a1b316759d69b717101bfd639e5143f1f65242b80c9ee8dec0d1a8e"} Oct 02 14:36:59 crc kubenswrapper[4717]: I1002 14:36:59.877808 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" event={"ID":"25007f37-eab2-4a19-aaec-041dccf4a1fa","Type":"ContainerDied","Data":"21e5b11327751827e2d1c004391b0281e9aaa5a46d052b90794528cc7aa80ac8"} Oct 02 14:36:59 crc kubenswrapper[4717]: I1002 14:36:59.880843 4717 generic.go:334] "Generic (PLEG): container finished" podID="b86d48c9-74ed-4705-8990-075e751746a6" containerID="a2accff64639b592679f5a28c4d69a7ec446ca652b42cca2a0e173870fdf5189" exitCode=0 Oct 02 14:36:59 crc kubenswrapper[4717]: I1002 14:36:59.880911 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-6glqr" event={"ID":"b86d48c9-74ed-4705-8990-075e751746a6","Type":"ContainerDied","Data":"a2accff64639b592679f5a28c4d69a7ec446ca652b42cca2a0e173870fdf5189"} Oct 02 14:36:59 crc kubenswrapper[4717]: I1002 14:36:59.882741 4717 generic.go:334] "Generic (PLEG): container finished" podID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerID="84d6e718ce796a98d89db1dc62393a3abac3f72dae0ae811916a005e1f975ea2" exitCode=0 Oct 02 14:36:59 crc kubenswrapper[4717]: I1002 14:36:59.882771 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" event={"ID":"d61faefb-44b1-49a1-968c-48bf323a6c54","Type":"ContainerDied","Data":"84d6e718ce796a98d89db1dc62393a3abac3f72dae0ae811916a005e1f975ea2"} Oct 02 14:37:00 crc kubenswrapper[4717]: I1002 14:37:00.891052 4717 generic.go:334] "Generic (PLEG): container finished" podID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerID="2fd885033b11cc028f289956a69b72d1bc4e5069a24d79a140dfa0a16e0e86f8" exitCode=0 Oct 02 14:37:00 crc kubenswrapper[4717]: I1002 14:37:00.891172 4717 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" event={"ID":"d61faefb-44b1-49a1-968c-48bf323a6c54","Type":"ContainerDied","Data":"2fd885033b11cc028f289956a69b72d1bc4e5069a24d79a140dfa0a16e0e86f8"} Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.194506 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.198975 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.238476 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-util\") pod \"25007f37-eab2-4a19-aaec-041dccf4a1fa\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.238533 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tk7t\" (UniqueName: \"kubernetes.io/projected/b86d48c9-74ed-4705-8990-075e751746a6-kube-api-access-4tk7t\") pod \"b86d48c9-74ed-4705-8990-075e751746a6\" (UID: \"b86d48c9-74ed-4705-8990-075e751746a6\") " Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.238633 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-bundle\") pod \"25007f37-eab2-4a19-aaec-041dccf4a1fa\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.238651 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26297\" (UniqueName: \"kubernetes.io/projected/25007f37-eab2-4a19-aaec-041dccf4a1fa-kube-api-access-26297\") pod \"25007f37-eab2-4a19-aaec-041dccf4a1fa\" (UID: \"25007f37-eab2-4a19-aaec-041dccf4a1fa\") " Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.238706 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86d48c9-74ed-4705-8990-075e751746a6-config-data\") pod \"b86d48c9-74ed-4705-8990-075e751746a6\" (UID: \"b86d48c9-74ed-4705-8990-075e751746a6\") " Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.239985 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-bundle" (OuterVolumeSpecName: "bundle") pod "25007f37-eab2-4a19-aaec-041dccf4a1fa" (UID: "25007f37-eab2-4a19-aaec-041dccf4a1fa"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.244586 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b86d48c9-74ed-4705-8990-075e751746a6-kube-api-access-4tk7t" (OuterVolumeSpecName: "kube-api-access-4tk7t") pod "b86d48c9-74ed-4705-8990-075e751746a6" (UID: "b86d48c9-74ed-4705-8990-075e751746a6"). InnerVolumeSpecName "kube-api-access-4tk7t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.245063 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25007f37-eab2-4a19-aaec-041dccf4a1fa-kube-api-access-26297" (OuterVolumeSpecName: "kube-api-access-26297") pod "25007f37-eab2-4a19-aaec-041dccf4a1fa" (UID: "25007f37-eab2-4a19-aaec-041dccf4a1fa"). InnerVolumeSpecName "kube-api-access-26297". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.254585 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-util" (OuterVolumeSpecName: "util") pod "25007f37-eab2-4a19-aaec-041dccf4a1fa" (UID: "25007f37-eab2-4a19-aaec-041dccf4a1fa"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.273984 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b86d48c9-74ed-4705-8990-075e751746a6-config-data" (OuterVolumeSpecName: "config-data") pod "b86d48c9-74ed-4705-8990-075e751746a6" (UID: "b86d48c9-74ed-4705-8990-075e751746a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.339998 4717 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.340047 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26297\" (UniqueName: \"kubernetes.io/projected/25007f37-eab2-4a19-aaec-041dccf4a1fa-kube-api-access-26297\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.340061 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b86d48c9-74ed-4705-8990-075e751746a6-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.340074 4717 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25007f37-eab2-4a19-aaec-041dccf4a1fa-util\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.340087 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tk7t\" (UniqueName: \"kubernetes.io/projected/b86d48c9-74ed-4705-8990-075e751746a6-kube-api-access-4tk7t\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.901478 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" event={"ID":"25007f37-eab2-4a19-aaec-041dccf4a1fa","Type":"ContainerDied","Data":"dc262df6fef77800c57b0f764cf495606ca3ed77a43f220b7f655dfbbee45d2c"} Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.901807 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc262df6fef77800c57b0f764cf495606ca3ed77a43f220b7f655dfbbee45d2c" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.901495 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.902939 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-6glqr" Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.902948 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-6glqr" event={"ID":"b86d48c9-74ed-4705-8990-075e751746a6","Type":"ContainerDied","Data":"8433706f79a365f3af7a882b5ad11d0139a29fb6c4437dd7e5bb7de314482697"} Oct 02 14:37:01 crc kubenswrapper[4717]: I1002 14:37:01.902991 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8433706f79a365f3af7a882b5ad11d0139a29fb6c4437dd7e5bb7de314482697" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.138592 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148329 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-t4m47"] Oct 02 14:37:02 crc kubenswrapper[4717]: E1002 14:37:02.148654 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerName="util" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148674 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerName="util" Oct 02 14:37:02 crc kubenswrapper[4717]: E1002 14:37:02.148688 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerName="pull" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148696 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerName="pull" Oct 02 14:37:02 crc kubenswrapper[4717]: E1002 14:37:02.148713 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b86d48c9-74ed-4705-8990-075e751746a6" containerName="keystone-db-sync" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148721 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b86d48c9-74ed-4705-8990-075e751746a6" containerName="keystone-db-sync" Oct 02 14:37:02 crc kubenswrapper[4717]: E1002 14:37:02.148736 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerName="extract" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148743 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerName="extract" Oct 02 14:37:02 crc kubenswrapper[4717]: E1002 14:37:02.148752 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerName="pull" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148758 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerName="pull" Oct 02 14:37:02 crc kubenswrapper[4717]: E1002 14:37:02.148770 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerName="util" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148779 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerName="util" Oct 02 14:37:02 crc 
kubenswrapper[4717]: E1002 14:37:02.148794 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerName="extract" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148801 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerName="extract" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148855 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-bundle\") pod \"d61faefb-44b1-49a1-968c-48bf323a6c54\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148921 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-util\") pod \"d61faefb-44b1-49a1-968c-48bf323a6c54\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.148990 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="b86d48c9-74ed-4705-8990-075e751746a6" containerName="keystone-db-sync" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.149007 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="d61faefb-44b1-49a1-968c-48bf323a6c54" containerName="extract" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.149017 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="25007f37-eab2-4a19-aaec-041dccf4a1fa" containerName="extract" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.149519 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.149901 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-bundle" (OuterVolumeSpecName: "bundle") pod "d61faefb-44b1-49a1-968c-48bf323a6c54" (UID: "d61faefb-44b1-49a1-968c-48bf323a6c54"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.153704 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-n7b9s" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.153734 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.153970 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.154132 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.159542 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-t4m47"] Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.175591 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-util" (OuterVolumeSpecName: "util") pod "d61faefb-44b1-49a1-968c-48bf323a6c54" (UID: "d61faefb-44b1-49a1-968c-48bf323a6c54"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.250556 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlxfz\" (UniqueName: \"kubernetes.io/projected/d61faefb-44b1-49a1-968c-48bf323a6c54-kube-api-access-mlxfz\") pod \"d61faefb-44b1-49a1-968c-48bf323a6c54\" (UID: \"d61faefb-44b1-49a1-968c-48bf323a6c54\") " Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.250734 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-fernet-keys\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.250781 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-config-data\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.250814 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-scripts\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.250836 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-credential-keys\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.250853 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz9sv\" (UniqueName: \"kubernetes.io/projected/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-kube-api-access-pz9sv\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.251051 4717 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.251081 4717 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d61faefb-44b1-49a1-968c-48bf323a6c54-util\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.256143 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d61faefb-44b1-49a1-968c-48bf323a6c54-kube-api-access-mlxfz" (OuterVolumeSpecName: "kube-api-access-mlxfz") pod "d61faefb-44b1-49a1-968c-48bf323a6c54" (UID: "d61faefb-44b1-49a1-968c-48bf323a6c54"). InnerVolumeSpecName "kube-api-access-mlxfz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.352481 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-fernet-keys\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.353182 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-config-data\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.353245 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-scripts\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.353286 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-credential-keys\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.353312 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz9sv\" (UniqueName: \"kubernetes.io/projected/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-kube-api-access-pz9sv\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.353479 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlxfz\" (UniqueName: \"kubernetes.io/projected/d61faefb-44b1-49a1-968c-48bf323a6c54-kube-api-access-mlxfz\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.356657 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-fernet-keys\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.357209 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-credential-keys\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.357471 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-config-data\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.357727 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-scripts\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.370062 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz9sv\" (UniqueName: \"kubernetes.io/projected/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-kube-api-access-pz9sv\") pod \"keystone-bootstrap-t4m47\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.470804 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.914494 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" event={"ID":"d61faefb-44b1-49a1-968c-48bf323a6c54","Type":"ContainerDied","Data":"390e0de5695ab3194125bac88419ea3046b1f0bd3dd66cb43287ff1b2db96f8e"} Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.915130 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="390e0de5695ab3194125bac88419ea3046b1f0bd3dd66cb43287ff1b2db96f8e" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.915272 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx" Oct 02 14:37:02 crc kubenswrapper[4717]: I1002 14:37:02.938458 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-t4m47"] Oct 02 14:37:02 crc kubenswrapper[4717]: W1002 14:37:02.941756 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ee2cd62_9303_4d6d_9bb6_e66c7e83b29e.slice/crio-e7589fd419a4189b448012a56899a5c47bbf56d9c96f550435a6f6ff0b9f4181 WatchSource:0}: Error finding container e7589fd419a4189b448012a56899a5c47bbf56d9c96f550435a6f6ff0b9f4181: Status 404 returned error can't find the container with id e7589fd419a4189b448012a56899a5c47bbf56d9c96f550435a6f6ff0b9f4181 Oct 02 14:37:03 crc kubenswrapper[4717]: I1002 14:37:03.926148 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" event={"ID":"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e","Type":"ContainerStarted","Data":"7af081b13d8195b13ebfc4c4971e30bc8b4421f86c5883b62bc0edd1c60d9348"} Oct 02 14:37:03 crc kubenswrapper[4717]: I1002 14:37:03.926194 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" event={"ID":"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e","Type":"ContainerStarted","Data":"e7589fd419a4189b448012a56899a5c47bbf56d9c96f550435a6f6ff0b9f4181"} Oct 02 14:37:03 crc kubenswrapper[4717]: I1002 14:37:03.939387 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" podStartSLOduration=1.93937472 podStartE2EDuration="1.93937472s" podCreationTimestamp="2025-10-02 14:37:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:37:03.938326412 +0000 UTC m=+974.790180858" watchObservedRunningTime="2025-10-02 14:37:03.93937472 +0000 UTC m=+974.791229166" Oct 02 14:37:05 crc 
kubenswrapper[4717]: I1002 14:37:05.940278 4717 generic.go:334] "Generic (PLEG): container finished" podID="3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" containerID="7af081b13d8195b13ebfc4c4971e30bc8b4421f86c5883b62bc0edd1c60d9348" exitCode=0 Oct 02 14:37:05 crc kubenswrapper[4717]: I1002 14:37:05.940324 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" event={"ID":"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e","Type":"ContainerDied","Data":"7af081b13d8195b13ebfc4c4971e30bc8b4421f86c5883b62bc0edd1c60d9348"} Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.246202 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.417433 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-fernet-keys\") pod \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.417861 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-config-data\") pod \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.417977 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-scripts\") pod \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.418034 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-credential-keys\") pod \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.418069 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz9sv\" (UniqueName: \"kubernetes.io/projected/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-kube-api-access-pz9sv\") pod \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\" (UID: \"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e\") " Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.433746 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" (UID: "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.434044 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-scripts" (OuterVolumeSpecName: "scripts") pod "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" (UID: "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.435951 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" (UID: "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.437094 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-config-data" (OuterVolumeSpecName: "config-data") pod "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" (UID: "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.438045 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-kube-api-access-pz9sv" (OuterVolumeSpecName: "kube-api-access-pz9sv") pod "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" (UID: "3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e"). InnerVolumeSpecName "kube-api-access-pz9sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.519737 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.520088 4717 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.520156 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz9sv\" (UniqueName: \"kubernetes.io/projected/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-kube-api-access-pz9sv\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.520224 4717 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.520290 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.964049 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" event={"ID":"3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e","Type":"ContainerDied","Data":"e7589fd419a4189b448012a56899a5c47bbf56d9c96f550435a6f6ff0b9f4181"} Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.964095 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7589fd419a4189b448012a56899a5c47bbf56d9c96f550435a6f6ff0b9f4181" Oct 02 14:37:07 crc kubenswrapper[4717]: I1002 14:37:07.964100 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-t4m47" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.031728 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-79f8987b94-jwn49"] Oct 02 14:37:08 crc kubenswrapper[4717]: E1002 14:37:08.032000 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" containerName="keystone-bootstrap" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.032012 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" containerName="keystone-bootstrap" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.032110 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" containerName="keystone-bootstrap" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.032548 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.034777 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.034807 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.034876 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-n7b9s" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.036310 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.042881 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-79f8987b94-jwn49"] Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.127761 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-credential-keys\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.127829 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-fernet-keys\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.127885 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf4kz\" (UniqueName: \"kubernetes.io/projected/53a403d0-5b76-48a7-8992-55c1b84b0d8e-kube-api-access-qf4kz\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.127914 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-scripts\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc 
kubenswrapper[4717]: I1002 14:37:08.127948 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-config-data\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.228580 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-credential-keys\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.228661 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-fernet-keys\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.228692 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf4kz\" (UniqueName: \"kubernetes.io/projected/53a403d0-5b76-48a7-8992-55c1b84b0d8e-kube-api-access-qf4kz\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.228718 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-scripts\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.228737 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-config-data\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.232528 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-credential-keys\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.233479 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-scripts\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.233852 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-config-data\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.233867 4717 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/53a403d0-5b76-48a7-8992-55c1b84b0d8e-fernet-keys\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.246003 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf4kz\" (UniqueName: \"kubernetes.io/projected/53a403d0-5b76-48a7-8992-55c1b84b0d8e-kube-api-access-qf4kz\") pod \"keystone-79f8987b94-jwn49\" (UID: \"53a403d0-5b76-48a7-8992-55c1b84b0d8e\") " pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.348277 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.742662 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-79f8987b94-jwn49"] Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.974948 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" event={"ID":"53a403d0-5b76-48a7-8992-55c1b84b0d8e","Type":"ContainerStarted","Data":"1d5ff20f230967ec8f82aa36a8b4c0633565de34d69c47ffc5f75efb20c4f390"} Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.975342 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.975358 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" event={"ID":"53a403d0-5b76-48a7-8992-55c1b84b0d8e","Type":"ContainerStarted","Data":"125e7ba5de70fa05f7704c9851eb3a8daf4fd53457e31f938972765667531268"} Oct 02 14:37:08 crc kubenswrapper[4717]: I1002 14:37:08.993963 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" podStartSLOduration=0.993942313 podStartE2EDuration="993.942313ms" podCreationTimestamp="2025-10-02 14:37:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:37:08.99008616 +0000 UTC m=+979.841940616" watchObservedRunningTime="2025-10-02 14:37:08.993942313 +0000 UTC m=+979.845796759" Oct 02 14:37:14 crc kubenswrapper[4717]: I1002 14:37:14.924358 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h"] Oct 02 14:37:14 crc kubenswrapper[4717]: I1002 14:37:14.925728 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:14 crc kubenswrapper[4717]: I1002 14:37:14.928375 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-z4nmt" Oct 02 14:37:14 crc kubenswrapper[4717]: I1002 14:37:14.929136 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-service-cert" Oct 02 14:37:14 crc kubenswrapper[4717]: I1002 14:37:14.932178 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h"] Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.122651 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn9rt\" (UniqueName: \"kubernetes.io/projected/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-kube-api-access-jn9rt\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.122965 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-apiservice-cert\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.123093 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-webhook-cert\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.224688 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-webhook-cert\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.225114 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn9rt\" (UniqueName: \"kubernetes.io/projected/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-kube-api-access-jn9rt\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.225165 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-apiservice-cert\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.233699 4717 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-apiservice-cert\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.240800 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-webhook-cert\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.253476 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn9rt\" (UniqueName: \"kubernetes.io/projected/437a77e2-6cb4-4075-b0fa-1f8922bd2c76-kube-api-access-jn9rt\") pod \"horizon-operator-controller-manager-664b44f579-nkj7h\" (UID: \"437a77e2-6cb4-4075-b0fa-1f8922bd2c76\") " pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:15 crc kubenswrapper[4717]: I1002 14:37:15.552191 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:16 crc kubenswrapper[4717]: I1002 14:37:16.042530 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h"] Oct 02 14:37:17 crc kubenswrapper[4717]: I1002 14:37:17.024328 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" event={"ID":"437a77e2-6cb4-4075-b0fa-1f8922bd2c76","Type":"ContainerStarted","Data":"3ec18e8abac1c37349d8ed599864e2f6961775d05bf166d2ece25fe675390249"} Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.044402 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" event={"ID":"437a77e2-6cb4-4075-b0fa-1f8922bd2c76","Type":"ContainerStarted","Data":"0d91950cc6948ff179750885a470af32a99d3347f66303724c0ebaae19b1b516"} Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.044755 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" event={"ID":"437a77e2-6cb4-4075-b0fa-1f8922bd2c76","Type":"ContainerStarted","Data":"66d754237c8ed117941c9bc5016a66b3380039ca97e0b8de8254f9ca10457644"} Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.044778 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.062790 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" podStartSLOduration=2.695094877 podStartE2EDuration="6.062773058s" podCreationTimestamp="2025-10-02 14:37:14 +0000 UTC" firstStartedPulling="2025-10-02 14:37:16.066127616 +0000 UTC m=+986.917982062" lastFinishedPulling="2025-10-02 14:37:19.433805797 +0000 UTC m=+990.285660243" observedRunningTime="2025-10-02 14:37:20.058681687 +0000 UTC m=+990.910536133" watchObservedRunningTime="2025-10-02 
14:37:20.062773058 +0000 UTC m=+990.914627504" Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.735986 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc"] Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.737012 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.745952 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-service-cert" Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.749854 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-x2jhx" Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.807993 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc"] Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.906616 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19ac5c12-8b42-4f73-902a-dfcc557f8054-webhook-cert\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.906764 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19ac5c12-8b42-4f73-902a-dfcc557f8054-apiservice-cert\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:20 crc kubenswrapper[4717]: I1002 14:37:20.906807 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcwzh\" (UniqueName: \"kubernetes.io/projected/19ac5c12-8b42-4f73-902a-dfcc557f8054-kube-api-access-mcwzh\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:21 crc kubenswrapper[4717]: I1002 14:37:21.008080 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19ac5c12-8b42-4f73-902a-dfcc557f8054-apiservice-cert\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:21 crc kubenswrapper[4717]: I1002 14:37:21.008134 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcwzh\" (UniqueName: \"kubernetes.io/projected/19ac5c12-8b42-4f73-902a-dfcc557f8054-kube-api-access-mcwzh\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:21 crc kubenswrapper[4717]: I1002 14:37:21.008163 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/19ac5c12-8b42-4f73-902a-dfcc557f8054-webhook-cert\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:21 crc kubenswrapper[4717]: I1002 14:37:21.014782 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/19ac5c12-8b42-4f73-902a-dfcc557f8054-webhook-cert\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:21 crc kubenswrapper[4717]: I1002 14:37:21.014782 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/19ac5c12-8b42-4f73-902a-dfcc557f8054-apiservice-cert\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:21 crc kubenswrapper[4717]: I1002 14:37:21.030519 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcwzh\" (UniqueName: \"kubernetes.io/projected/19ac5c12-8b42-4f73-902a-dfcc557f8054-kube-api-access-mcwzh\") pod \"swift-operator-controller-manager-7cf97c46f6-tw6bc\" (UID: \"19ac5c12-8b42-4f73-902a-dfcc557f8054\") " pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:21 crc kubenswrapper[4717]: I1002 14:37:21.059656 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:21 crc kubenswrapper[4717]: I1002 14:37:21.449097 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc"] Oct 02 14:37:21 crc kubenswrapper[4717]: W1002 14:37:21.460310 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19ac5c12_8b42_4f73_902a_dfcc557f8054.slice/crio-aa60e73709f19df41d3a661d0eaa29dbde26a9a31f48f53bd0c857bc06b56934 WatchSource:0}: Error finding container aa60e73709f19df41d3a661d0eaa29dbde26a9a31f48f53bd0c857bc06b56934: Status 404 returned error can't find the container with id aa60e73709f19df41d3a661d0eaa29dbde26a9a31f48f53bd0c857bc06b56934 Oct 02 14:37:22 crc kubenswrapper[4717]: I1002 14:37:22.057949 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" event={"ID":"19ac5c12-8b42-4f73-902a-dfcc557f8054","Type":"ContainerStarted","Data":"aa60e73709f19df41d3a661d0eaa29dbde26a9a31f48f53bd0c857bc06b56934"} Oct 02 14:37:24 crc kubenswrapper[4717]: I1002 14:37:24.074161 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" event={"ID":"19ac5c12-8b42-4f73-902a-dfcc557f8054","Type":"ContainerStarted","Data":"6051a46aa1d680e8dcd30db2d2b8b14ca2cee19f98991040df09e50ffef847d8"} Oct 02 14:37:24 crc kubenswrapper[4717]: I1002 14:37:24.074721 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" 
event={"ID":"19ac5c12-8b42-4f73-902a-dfcc557f8054","Type":"ContainerStarted","Data":"8c870a95980ed247361a2d4150f76ec7c4a21a491ba4f532f6c6f3b49e1a3d4e"} Oct 02 14:37:24 crc kubenswrapper[4717]: I1002 14:37:24.074739 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:24 crc kubenswrapper[4717]: I1002 14:37:24.125980 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" podStartSLOduration=2.134658634 podStartE2EDuration="4.12595766s" podCreationTimestamp="2025-10-02 14:37:20 +0000 UTC" firstStartedPulling="2025-10-02 14:37:21.462424693 +0000 UTC m=+992.314279139" lastFinishedPulling="2025-10-02 14:37:23.453723719 +0000 UTC m=+994.305578165" observedRunningTime="2025-10-02 14:37:24.124627654 +0000 UTC m=+994.976482110" watchObservedRunningTime="2025-10-02 14:37:24.12595766 +0000 UTC m=+994.977812106" Oct 02 14:37:25 crc kubenswrapper[4717]: I1002 14:37:25.557014 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-664b44f579-nkj7h" Oct 02 14:37:31 crc kubenswrapper[4717]: I1002 14:37:31.064364 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-7cf97c46f6-tw6bc" Oct 02 14:37:36 crc kubenswrapper[4717]: I1002 14:37:36.944552 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-index-x7hk9"] Oct 02 14:37:36 crc kubenswrapper[4717]: I1002 14:37:36.950508 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-index-x7hk9"] Oct 02 14:37:36 crc kubenswrapper[4717]: I1002 14:37:36.950641 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-index-x7hk9" Oct 02 14:37:36 crc kubenswrapper[4717]: I1002 14:37:36.953433 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-index-dockercfg-nhrl6" Oct 02 14:37:37 crc kubenswrapper[4717]: I1002 14:37:37.143236 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l2nt\" (UniqueName: \"kubernetes.io/projected/cbfe66b2-314d-4120-a4d7-986b0bf5a85d-kube-api-access-6l2nt\") pod \"glance-operator-index-x7hk9\" (UID: \"cbfe66b2-314d-4120-a4d7-986b0bf5a85d\") " pod="openstack-operators/glance-operator-index-x7hk9" Oct 02 14:37:37 crc kubenswrapper[4717]: I1002 14:37:37.244599 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l2nt\" (UniqueName: \"kubernetes.io/projected/cbfe66b2-314d-4120-a4d7-986b0bf5a85d-kube-api-access-6l2nt\") pod \"glance-operator-index-x7hk9\" (UID: \"cbfe66b2-314d-4120-a4d7-986b0bf5a85d\") " pod="openstack-operators/glance-operator-index-x7hk9" Oct 02 14:37:37 crc kubenswrapper[4717]: I1002 14:37:37.267646 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l2nt\" (UniqueName: \"kubernetes.io/projected/cbfe66b2-314d-4120-a4d7-986b0bf5a85d-kube-api-access-6l2nt\") pod \"glance-operator-index-x7hk9\" (UID: \"cbfe66b2-314d-4120-a4d7-986b0bf5a85d\") " pod="openstack-operators/glance-operator-index-x7hk9" Oct 02 14:37:37 crc kubenswrapper[4717]: I1002 14:37:37.267925 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-index-x7hk9" Oct 02 14:37:37 crc kubenswrapper[4717]: I1002 14:37:37.676754 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-index-x7hk9"] Oct 02 14:37:37 crc kubenswrapper[4717]: W1002 14:37:37.686523 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcbfe66b2_314d_4120_a4d7_986b0bf5a85d.slice/crio-c8a59d94b772f6c517b2cf0ed6be7e2280427faeca1547adf83be59ae4dcf390 WatchSource:0}: Error finding container c8a59d94b772f6c517b2cf0ed6be7e2280427faeca1547adf83be59ae4dcf390: Status 404 returned error can't find the container with id c8a59d94b772f6c517b2cf0ed6be7e2280427faeca1547adf83be59ae4dcf390 Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.080613 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.084813 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.086734 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-files" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.087118 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-conf" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.087191 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-storage-config-data" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.087770 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-swift-dockercfg-bjsrk" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.101006 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.168138 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-x7hk9" event={"ID":"cbfe66b2-314d-4120-a4d7-986b0bf5a85d","Type":"ContainerStarted","Data":"c8a59d94b772f6c517b2cf0ed6be7e2280427faeca1547adf83be59ae4dcf390"} Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.258360 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.258529 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d98dv\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-kube-api-access-d98dv\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.258598 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a4258a20-8978-4f2a-bb99-793fe396938c-lock\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.258680 4717 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.258727 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a4258a20-8978-4f2a-bb99-793fe396938c-cache\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.360650 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.361051 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d98dv\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-kube-api-access-d98dv\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.361089 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a4258a20-8978-4f2a-bb99-793fe396938c-lock\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.361135 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.361191 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a4258a20-8978-4f2a-bb99-793fe396938c-cache\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.361771 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/a4258a20-8978-4f2a-bb99-793fe396938c-cache\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: E1002 14:37:38.360976 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:38 crc kubenswrapper[4717]: E1002 14:37:38.361834 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 02 14:37:38 crc kubenswrapper[4717]: E1002 14:37:38.361897 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift podName:a4258a20-8978-4f2a-bb99-793fe396938c nodeName:}" failed. 
No retries permitted until 2025-10-02 14:37:38.861875489 +0000 UTC m=+1009.713729955 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift") pod "swift-storage-0" (UID: "a4258a20-8978-4f2a-bb99-793fe396938c") : configmap "swift-ring-files" not found Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.362802 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/a4258a20-8978-4f2a-bb99-793fe396938c-lock\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.363139 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") device mount path \"/mnt/openstack/pv02\"" pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.384313 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.386251 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d98dv\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-kube-api-access-d98dv\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.597518 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-tcfzv"] Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.598438 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.607116 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-scripts" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.614305 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-config-data" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.615637 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-proxy-config-data" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.615729 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-tcfzv"] Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.669578 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nsfs\" (UniqueName: \"kubernetes.io/projected/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-kube-api-access-2nsfs\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.669651 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-dispersionconf\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.669694 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-etc-swift\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.669711 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-ring-data-devices\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.669743 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-swiftconf\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.669850 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-scripts\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.771108 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-scripts\") pod \"swift-ring-rebalance-tcfzv\" (UID: 
\"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.771212 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nsfs\" (UniqueName: \"kubernetes.io/projected/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-kube-api-access-2nsfs\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.771260 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-dispersionconf\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.771301 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-ring-data-devices\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.771322 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-etc-swift\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.771360 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-swiftconf\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.772041 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-etc-swift\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.772557 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-ring-data-devices\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.772647 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-scripts\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.775037 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-dispersionconf\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " 
pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.789806 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-swiftconf\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.792637 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nsfs\" (UniqueName: \"kubernetes.io/projected/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-kube-api-access-2nsfs\") pod \"swift-ring-rebalance-tcfzv\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.872232 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:38 crc kubenswrapper[4717]: E1002 14:37:38.872469 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:38 crc kubenswrapper[4717]: E1002 14:37:38.872520 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 02 14:37:38 crc kubenswrapper[4717]: E1002 14:37:38.872609 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift podName:a4258a20-8978-4f2a-bb99-793fe396938c nodeName:}" failed. No retries permitted until 2025-10-02 14:37:39.872567051 +0000 UTC m=+1010.724421507 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift") pod "swift-storage-0" (UID: "a4258a20-8978-4f2a-bb99-793fe396938c") : configmap "swift-ring-files" not found Oct 02 14:37:38 crc kubenswrapper[4717]: I1002 14:37:38.918389 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:39 crc kubenswrapper[4717]: I1002 14:37:39.360517 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-tcfzv"] Oct 02 14:37:39 crc kubenswrapper[4717]: W1002 14:37:39.366366 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d807c62_a7f6_43c8_bd1d_826a8bb17e0d.slice/crio-8e00628c680747d37f3be01a4aaa548a8290f077186d804bbbf424744f423474 WatchSource:0}: Error finding container 8e00628c680747d37f3be01a4aaa548a8290f077186d804bbbf424744f423474: Status 404 returned error can't find the container with id 8e00628c680747d37f3be01a4aaa548a8290f077186d804bbbf424744f423474 Oct 02 14:37:39 crc kubenswrapper[4717]: I1002 14:37:39.782910 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/keystone-79f8987b94-jwn49" Oct 02 14:37:39 crc kubenswrapper[4717]: I1002 14:37:39.884952 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:39 crc kubenswrapper[4717]: E1002 14:37:39.885376 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:39 crc kubenswrapper[4717]: E1002 14:37:39.885409 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 02 14:37:39 crc kubenswrapper[4717]: E1002 14:37:39.885472 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift podName:a4258a20-8978-4f2a-bb99-793fe396938c nodeName:}" failed. No retries permitted until 2025-10-02 14:37:41.885452235 +0000 UTC m=+1012.737306761 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift") pod "swift-storage-0" (UID: "a4258a20-8978-4f2a-bb99-793fe396938c") : configmap "swift-ring-files" not found Oct 02 14:37:40 crc kubenswrapper[4717]: I1002 14:37:40.183029 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-x7hk9" event={"ID":"cbfe66b2-314d-4120-a4d7-986b0bf5a85d","Type":"ContainerStarted","Data":"d86da53cf14aaa0871b40b8a04a7b904f73f5b825dbf50bdf5e8c108cd7bfda3"} Oct 02 14:37:40 crc kubenswrapper[4717]: I1002 14:37:40.185167 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" event={"ID":"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d","Type":"ContainerStarted","Data":"8e00628c680747d37f3be01a4aaa548a8290f077186d804bbbf424744f423474"} Oct 02 14:37:40 crc kubenswrapper[4717]: I1002 14:37:40.198230 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-index-x7hk9" podStartSLOduration=2.64810836 podStartE2EDuration="4.198209654s" podCreationTimestamp="2025-10-02 14:37:36 +0000 UTC" firstStartedPulling="2025-10-02 14:37:37.689988677 +0000 UTC m=+1008.541843123" lastFinishedPulling="2025-10-02 14:37:39.240089971 +0000 UTC m=+1010.091944417" observedRunningTime="2025-10-02 14:37:40.196502638 +0000 UTC m=+1011.048357104" watchObservedRunningTime="2025-10-02 14:37:40.198209654 +0000 UTC m=+1011.050064110" Oct 02 14:37:41 crc kubenswrapper[4717]: I1002 14:37:41.914727 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:41 crc kubenswrapper[4717]: E1002 14:37:41.915374 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:41 crc kubenswrapper[4717]: E1002 14:37:41.915392 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 02 14:37:41 crc kubenswrapper[4717]: E1002 14:37:41.915440 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift podName:a4258a20-8978-4f2a-bb99-793fe396938c nodeName:}" failed. No retries permitted until 2025-10-02 14:37:45.915423687 +0000 UTC m=+1016.767278143 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift") pod "swift-storage-0" (UID: "a4258a20-8978-4f2a-bb99-793fe396938c") : configmap "swift-ring-files" not found Oct 02 14:37:42 crc kubenswrapper[4717]: I1002 14:37:42.347522 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/glance-operator-index-x7hk9"] Oct 02 14:37:42 crc kubenswrapper[4717]: I1002 14:37:42.347994 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/glance-operator-index-x7hk9" podUID="cbfe66b2-314d-4120-a4d7-986b0bf5a85d" containerName="registry-server" containerID="cri-o://d86da53cf14aaa0871b40b8a04a7b904f73f5b825dbf50bdf5e8c108cd7bfda3" gracePeriod=2 Oct 02 14:37:42 crc kubenswrapper[4717]: I1002 14:37:42.943974 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-index-fmxm2"] Oct 02 14:37:42 crc kubenswrapper[4717]: I1002 14:37:42.944729 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:42 crc kubenswrapper[4717]: I1002 14:37:42.957130 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-index-fmxm2"] Oct 02 14:37:43 crc kubenswrapper[4717]: I1002 14:37:43.131573 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmtws\" (UniqueName: \"kubernetes.io/projected/ec6951a5-2a08-4902-8a9b-3dd3a43bce05-kube-api-access-xmtws\") pod \"glance-operator-index-fmxm2\" (UID: \"ec6951a5-2a08-4902-8a9b-3dd3a43bce05\") " pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:43 crc kubenswrapper[4717]: I1002 14:37:43.209140 4717 generic.go:334] "Generic (PLEG): container finished" podID="cbfe66b2-314d-4120-a4d7-986b0bf5a85d" containerID="d86da53cf14aaa0871b40b8a04a7b904f73f5b825dbf50bdf5e8c108cd7bfda3" exitCode=0 Oct 02 14:37:43 crc kubenswrapper[4717]: I1002 14:37:43.209187 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-x7hk9" event={"ID":"cbfe66b2-314d-4120-a4d7-986b0bf5a85d","Type":"ContainerDied","Data":"d86da53cf14aaa0871b40b8a04a7b904f73f5b825dbf50bdf5e8c108cd7bfda3"} Oct 02 14:37:43 crc kubenswrapper[4717]: I1002 14:37:43.233074 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmtws\" (UniqueName: \"kubernetes.io/projected/ec6951a5-2a08-4902-8a9b-3dd3a43bce05-kube-api-access-xmtws\") pod \"glance-operator-index-fmxm2\" (UID: \"ec6951a5-2a08-4902-8a9b-3dd3a43bce05\") " pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:43 crc kubenswrapper[4717]: I1002 14:37:43.251079 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmtws\" (UniqueName: \"kubernetes.io/projected/ec6951a5-2a08-4902-8a9b-3dd3a43bce05-kube-api-access-xmtws\") pod \"glance-operator-index-fmxm2\" (UID: \"ec6951a5-2a08-4902-8a9b-3dd3a43bce05\") " pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:43 crc kubenswrapper[4717]: I1002 14:37:43.259986 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.810639 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd"] Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.812091 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.826416 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd"] Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.860249 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-config-data\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.860295 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.860355 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-run-httpd\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.860408 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5dxs\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-kube-api-access-k5dxs\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.860923 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-log-httpd\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.961974 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-config-data\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.962025 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.962060 4717 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-run-httpd\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.962101 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5dxs\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-kube-api-access-k5dxs\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.962153 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-log-httpd\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: E1002 14:37:44.962366 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:44 crc kubenswrapper[4717]: E1002 14:37:44.962389 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd: configmap "swift-ring-files" not found Oct 02 14:37:44 crc kubenswrapper[4717]: E1002 14:37:44.962435 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift podName:33b2243b-a2fc-4332-b6c3-c4c0af731c8c nodeName:}" failed. No retries permitted until 2025-10-02 14:37:45.462418415 +0000 UTC m=+1016.314272861 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift") pod "swift-proxy-59cb459c9f-krwjd" (UID: "33b2243b-a2fc-4332-b6c3-c4c0af731c8c") : configmap "swift-ring-files" not found Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.962731 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-run-httpd\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.962815 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-log-httpd\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.966839 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-config-data\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:44 crc kubenswrapper[4717]: I1002 14:37:44.979297 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5dxs\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-kube-api-access-k5dxs\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:45 crc kubenswrapper[4717]: I1002 14:37:45.304602 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-index-x7hk9" Oct 02 14:37:45 crc kubenswrapper[4717]: I1002 14:37:45.467860 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6l2nt\" (UniqueName: \"kubernetes.io/projected/cbfe66b2-314d-4120-a4d7-986b0bf5a85d-kube-api-access-6l2nt\") pod \"cbfe66b2-314d-4120-a4d7-986b0bf5a85d\" (UID: \"cbfe66b2-314d-4120-a4d7-986b0bf5a85d\") " Oct 02 14:37:45 crc kubenswrapper[4717]: I1002 14:37:45.468443 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:45 crc kubenswrapper[4717]: E1002 14:37:45.468566 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:45 crc kubenswrapper[4717]: E1002 14:37:45.468579 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd: configmap "swift-ring-files" not found Oct 02 14:37:45 crc kubenswrapper[4717]: E1002 14:37:45.468619 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift podName:33b2243b-a2fc-4332-b6c3-c4c0af731c8c nodeName:}" failed. 
No retries permitted until 2025-10-02 14:37:46.468606706 +0000 UTC m=+1017.320461152 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift") pod "swift-proxy-59cb459c9f-krwjd" (UID: "33b2243b-a2fc-4332-b6c3-c4c0af731c8c") : configmap "swift-ring-files" not found Oct 02 14:37:45 crc kubenswrapper[4717]: I1002 14:37:45.474662 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbfe66b2-314d-4120-a4d7-986b0bf5a85d-kube-api-access-6l2nt" (OuterVolumeSpecName: "kube-api-access-6l2nt") pod "cbfe66b2-314d-4120-a4d7-986b0bf5a85d" (UID: "cbfe66b2-314d-4120-a4d7-986b0bf5a85d"). InnerVolumeSpecName "kube-api-access-6l2nt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:37:45 crc kubenswrapper[4717]: I1002 14:37:45.480197 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-index-fmxm2"] Oct 02 14:37:45 crc kubenswrapper[4717]: I1002 14:37:45.569589 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6l2nt\" (UniqueName: \"kubernetes.io/projected/cbfe66b2-314d-4120-a4d7-986b0bf5a85d-kube-api-access-6l2nt\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:45 crc kubenswrapper[4717]: I1002 14:37:45.975481 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:45 crc kubenswrapper[4717]: E1002 14:37:45.975742 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:45 crc kubenswrapper[4717]: E1002 14:37:45.975968 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 02 14:37:45 crc kubenswrapper[4717]: E1002 14:37:45.976072 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift podName:a4258a20-8978-4f2a-bb99-793fe396938c nodeName:}" failed. No retries permitted until 2025-10-02 14:37:53.976044949 +0000 UTC m=+1024.827899445 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift") pod "swift-storage-0" (UID: "a4258a20-8978-4f2a-bb99-793fe396938c") : configmap "swift-ring-files" not found Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.237967 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-fmxm2" event={"ID":"ec6951a5-2a08-4902-8a9b-3dd3a43bce05","Type":"ContainerStarted","Data":"a184ad05ab864f824d25dbff9456302c1780a7b8abfb33315905d879cf24ff2f"} Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.238013 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-fmxm2" event={"ID":"ec6951a5-2a08-4902-8a9b-3dd3a43bce05","Type":"ContainerStarted","Data":"9fb90774f4914ab39d906da0d4c095769147c6964b6a0f3df15e6551747d453a"} Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.240338 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-index-x7hk9" Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.240322 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-x7hk9" event={"ID":"cbfe66b2-314d-4120-a4d7-986b0bf5a85d","Type":"ContainerDied","Data":"c8a59d94b772f6c517b2cf0ed6be7e2280427faeca1547adf83be59ae4dcf390"} Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.240417 4717 scope.go:117] "RemoveContainer" containerID="d86da53cf14aaa0871b40b8a04a7b904f73f5b825dbf50bdf5e8c108cd7bfda3" Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.247384 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" event={"ID":"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d","Type":"ContainerStarted","Data":"dfb40e93ea43a99045a053ad125b5e494456c4853c298b59d3abf04ac093abd3"} Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.263022 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-index-fmxm2" podStartSLOduration=4.2049743490000004 podStartE2EDuration="4.26300432s" podCreationTimestamp="2025-10-02 14:37:42 +0000 UTC" firstStartedPulling="2025-10-02 14:37:45.492796619 +0000 UTC m=+1016.344651065" lastFinishedPulling="2025-10-02 14:37:45.55082659 +0000 UTC m=+1016.402681036" observedRunningTime="2025-10-02 14:37:46.260273826 +0000 UTC m=+1017.112128262" watchObservedRunningTime="2025-10-02 14:37:46.26300432 +0000 UTC m=+1017.114858766" Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.290355 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" podStartSLOduration=2.31156322 podStartE2EDuration="8.29033467s" podCreationTimestamp="2025-10-02 14:37:38 +0000 UTC" firstStartedPulling="2025-10-02 14:37:39.369115141 +0000 UTC m=+1010.220969587" lastFinishedPulling="2025-10-02 14:37:45.347886591 +0000 UTC m=+1016.199741037" observedRunningTime="2025-10-02 14:37:46.282798516 +0000 UTC m=+1017.134652962" watchObservedRunningTime="2025-10-02 14:37:46.29033467 +0000 UTC m=+1017.142189116" Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.301773 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/glance-operator-index-x7hk9"] Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.307235 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/glance-operator-index-x7hk9"] Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.484901 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:46 crc kubenswrapper[4717]: E1002 14:37:46.485117 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:46 crc kubenswrapper[4717]: E1002 14:37:46.485150 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd: configmap "swift-ring-files" not found Oct 02 14:37:46 crc kubenswrapper[4717]: E1002 14:37:46.485233 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift 
podName:33b2243b-a2fc-4332-b6c3-c4c0af731c8c nodeName:}" failed. No retries permitted until 2025-10-02 14:37:48.4852075 +0000 UTC m=+1019.337061986 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift") pod "swift-proxy-59cb459c9f-krwjd" (UID: "33b2243b-a2fc-4332-b6c3-c4c0af731c8c") : configmap "swift-ring-files" not found Oct 02 14:37:46 crc kubenswrapper[4717]: I1002 14:37:46.849810 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbfe66b2-314d-4120-a4d7-986b0bf5a85d" path="/var/lib/kubelet/pods/cbfe66b2-314d-4120-a4d7-986b0bf5a85d/volumes" Oct 02 14:37:48 crc kubenswrapper[4717]: I1002 14:37:48.509549 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:48 crc kubenswrapper[4717]: E1002 14:37:48.509678 4717 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 02 14:37:48 crc kubenswrapper[4717]: E1002 14:37:48.509958 4717 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd: configmap "swift-ring-files" not found Oct 02 14:37:48 crc kubenswrapper[4717]: E1002 14:37:48.510012 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift podName:33b2243b-a2fc-4332-b6c3-c4c0af731c8c nodeName:}" failed. No retries permitted until 2025-10-02 14:37:52.509996012 +0000 UTC m=+1023.361850458 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift") pod "swift-proxy-59cb459c9f-krwjd" (UID: "33b2243b-a2fc-4332-b6c3-c4c0af731c8c") : configmap "swift-ring-files" not found Oct 02 14:37:52 crc kubenswrapper[4717]: I1002 14:37:52.293482 4717 generic.go:334] "Generic (PLEG): container finished" podID="4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" containerID="dfb40e93ea43a99045a053ad125b5e494456c4853c298b59d3abf04ac093abd3" exitCode=0 Oct 02 14:37:52 crc kubenswrapper[4717]: I1002 14:37:52.293607 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" event={"ID":"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d","Type":"ContainerDied","Data":"dfb40e93ea43a99045a053ad125b5e494456c4853c298b59d3abf04ac093abd3"} Oct 02 14:37:52 crc kubenswrapper[4717]: I1002 14:37:52.564146 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:52 crc kubenswrapper[4717]: I1002 14:37:52.573438 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b2243b-a2fc-4332-b6c3-c4c0af731c8c-etc-swift\") pod \"swift-proxy-59cb459c9f-krwjd\" (UID: \"33b2243b-a2fc-4332-b6c3-c4c0af731c8c\") " pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:52 crc kubenswrapper[4717]: I1002 14:37:52.627531 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.056272 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd"] Oct 02 14:37:53 crc kubenswrapper[4717]: W1002 14:37:53.060595 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33b2243b_a2fc_4332_b6c3_c4c0af731c8c.slice/crio-eb5939aa37507cdec480bec17189ae49c4473b8fe1465ea10ad2548168e4eaff WatchSource:0}: Error finding container eb5939aa37507cdec480bec17189ae49c4473b8fe1465ea10ad2548168e4eaff: Status 404 returned error can't find the container with id eb5939aa37507cdec480bec17189ae49c4473b8fe1465ea10ad2548168e4eaff Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.261267 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.261325 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.289754 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.302113 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" event={"ID":"33b2243b-a2fc-4332-b6c3-c4c0af731c8c","Type":"ContainerStarted","Data":"eb5939aa37507cdec480bec17189ae49c4473b8fe1465ea10ad2548168e4eaff"} Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.347184 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/glance-operator-index-fmxm2" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.535136 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.677823 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nsfs\" (UniqueName: \"kubernetes.io/projected/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-kube-api-access-2nsfs\") pod \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.677887 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-dispersionconf\") pod \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.678005 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-ring-data-devices\") pod \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.678057 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-etc-swift\") pod \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.678145 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-swiftconf\") pod \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.678175 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-scripts\") pod \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\" (UID: \"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d\") " Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.678674 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" (UID: "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.679347 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" (UID: "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.682616 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-kube-api-access-2nsfs" (OuterVolumeSpecName: "kube-api-access-2nsfs") pod "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" (UID: "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d"). InnerVolumeSpecName "kube-api-access-2nsfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.689100 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" (UID: "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.697875 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-scripts" (OuterVolumeSpecName: "scripts") pod "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" (UID: "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.713182 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" (UID: "4d807c62-a7f6-43c8-bd1d-826a8bb17e0d"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.779997 4717 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.780259 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.780269 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nsfs\" (UniqueName: \"kubernetes.io/projected/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-kube-api-access-2nsfs\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.780279 4717 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.780289 4717 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.780297 4717 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/4d807c62-a7f6-43c8-bd1d-826a8bb17e0d-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.983473 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" 
(UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:53 crc kubenswrapper[4717]: I1002 14:37:53.999336 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a4258a20-8978-4f2a-bb99-793fe396938c-etc-swift\") pod \"swift-storage-0\" (UID: \"a4258a20-8978-4f2a-bb99-793fe396938c\") " pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.014169 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-storage-0" Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.310895 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" event={"ID":"33b2243b-a2fc-4332-b6c3-c4c0af731c8c","Type":"ContainerStarted","Data":"695c487ef3ebda2724573a301b2097cd2750c347e355b5c1b62461d50b4ce104"} Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.310954 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" event={"ID":"33b2243b-a2fc-4332-b6c3-c4c0af731c8c","Type":"ContainerStarted","Data":"d2e43bc165e074d8fa28ab006e5e6f151b9a4be7dd1070be2b612540b7ed685f"} Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.311489 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.311524 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.316359 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.318074 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-tcfzv" event={"ID":"4d807c62-a7f6-43c8-bd1d-826a8bb17e0d","Type":"ContainerDied","Data":"8e00628c680747d37f3be01a4aaa548a8290f077186d804bbbf424744f423474"} Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.318107 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e00628c680747d37f3be01a4aaa548a8290f077186d804bbbf424744f423474" Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.338612 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" podStartSLOduration=10.33859268 podStartE2EDuration="10.33859268s" podCreationTimestamp="2025-10-02 14:37:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:37:54.334242063 +0000 UTC m=+1025.186096509" watchObservedRunningTime="2025-10-02 14:37:54.33859268 +0000 UTC m=+1025.190447116" Oct 02 14:37:54 crc kubenswrapper[4717]: I1002 14:37:54.459276 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Oct 02 14:37:54 crc kubenswrapper[4717]: W1002 14:37:54.465021 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4258a20_8978_4f2a_bb99_793fe396938c.slice/crio-39c9cfb169406f6cd1ebb32a280ba0641463578cb30902f7ff57ae65c7185ff5 WatchSource:0}: Error finding container 39c9cfb169406f6cd1ebb32a280ba0641463578cb30902f7ff57ae65c7185ff5: Status 404 returned error can't find the container with id 39c9cfb169406f6cd1ebb32a280ba0641463578cb30902f7ff57ae65c7185ff5 Oct 02 14:37:55 crc kubenswrapper[4717]: I1002 14:37:55.326558 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"39c9cfb169406f6cd1ebb32a280ba0641463578cb30902f7ff57ae65c7185ff5"} Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.034211 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8"] Oct 02 14:37:56 crc kubenswrapper[4717]: E1002 14:37:56.034775 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbfe66b2-314d-4120-a4d7-986b0bf5a85d" containerName="registry-server" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.034786 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbfe66b2-314d-4120-a4d7-986b0bf5a85d" containerName="registry-server" Oct 02 14:37:56 crc kubenswrapper[4717]: E1002 14:37:56.034802 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" containerName="swift-ring-rebalance" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.034808 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" containerName="swift-ring-rebalance" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.034984 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbfe66b2-314d-4120-a4d7-986b0bf5a85d" containerName="registry-server" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.034996 4717 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="4d807c62-a7f6-43c8-bd1d-826a8bb17e0d" containerName="swift-ring-rebalance" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.035824 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.040965 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-dzqss" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.101038 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8"] Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.113657 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-util\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.113749 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlkr9\" (UniqueName: \"kubernetes.io/projected/f091692c-789e-45e1-aa38-a06ad59db093-kube-api-access-qlkr9\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.113874 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-bundle\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.215296 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlkr9\" (UniqueName: \"kubernetes.io/projected/f091692c-789e-45e1-aa38-a06ad59db093-kube-api-access-qlkr9\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.215361 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-bundle\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.215396 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-util\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc 
kubenswrapper[4717]: I1002 14:37:56.215785 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-util\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.215986 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-bundle\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.247972 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlkr9\" (UniqueName: \"kubernetes.io/projected/f091692c-789e-45e1-aa38-a06ad59db093-kube-api-access-qlkr9\") pod \"33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.340355 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"7b34c63366efbeacdc4742a907bc0cbea9d0b23204936bb08a1385b1ea1371bd"} Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.340677 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"6b0b40316b050850ba6b5db8eb1e2edadda6218cb15bcbc133ceb90ebc506e7a"} Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.351912 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:37:56 crc kubenswrapper[4717]: I1002 14:37:56.790664 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8"] Oct 02 14:37:57 crc kubenswrapper[4717]: I1002 14:37:57.351375 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"79de1be2ba13668e0dc5170acb824b55c07c0718f2a81c4cc04a53ac2db55662"} Oct 02 14:37:57 crc kubenswrapper[4717]: I1002 14:37:57.351426 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"a30bff98fdd2409122f92a66c9725dc32a0a22abe2882446c06d5d1ecb4c8cb2"} Oct 02 14:37:57 crc kubenswrapper[4717]: I1002 14:37:57.354683 4717 generic.go:334] "Generic (PLEG): container finished" podID="f091692c-789e-45e1-aa38-a06ad59db093" containerID="724c3b71001a85d1a72278a4b2603a3ee56850f3aec412b2ba72ef50d589ca77" exitCode=0 Oct 02 14:37:57 crc kubenswrapper[4717]: I1002 14:37:57.354711 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" event={"ID":"f091692c-789e-45e1-aa38-a06ad59db093","Type":"ContainerDied","Data":"724c3b71001a85d1a72278a4b2603a3ee56850f3aec412b2ba72ef50d589ca77"} Oct 02 14:37:57 crc kubenswrapper[4717]: I1002 14:37:57.354732 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" event={"ID":"f091692c-789e-45e1-aa38-a06ad59db093","Type":"ContainerStarted","Data":"a391530d949f3c7ba632bebcb46358d849dae8515ad22934abee6e4995749835"} Oct 02 14:37:59 crc kubenswrapper[4717]: I1002 14:37:59.374407 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"97a97f0ed11b05cfdd99b1e7c494bb94109db105ec601ad0b9a9b5567b52885a"} Oct 02 14:37:59 crc kubenswrapper[4717]: I1002 14:37:59.375043 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"bff46ae55c2952721c4c66a446c7ad882c8f4f113e834c4953c0a82560342a1f"} Oct 02 14:37:59 crc kubenswrapper[4717]: I1002 14:37:59.375057 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"932317ca349ecab99c1abd298b8a0c88a72332f646f29d39f1b7ddd70d93533c"} Oct 02 14:37:59 crc kubenswrapper[4717]: I1002 14:37:59.375067 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"f8a1faaa0835c0d6819ce419c6f9ef7812e0eb4a734c7985a73fa7359966f6cd"} Oct 02 14:37:59 crc kubenswrapper[4717]: I1002 14:37:59.377075 4717 generic.go:334] "Generic (PLEG): container finished" podID="f091692c-789e-45e1-aa38-a06ad59db093" containerID="f4b7963d0899ad54b3e476ab75ef101f87a8a29d43b0fde85cca0129470bc561" exitCode=0 Oct 02 14:37:59 crc kubenswrapper[4717]: I1002 14:37:59.377102 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" event={"ID":"f091692c-789e-45e1-aa38-a06ad59db093","Type":"ContainerDied","Data":"f4b7963d0899ad54b3e476ab75ef101f87a8a29d43b0fde85cca0129470bc561"} Oct 02 14:38:00 crc kubenswrapper[4717]: I1002 14:38:00.387245 4717 generic.go:334] "Generic (PLEG): container finished" podID="f091692c-789e-45e1-aa38-a06ad59db093" containerID="711a8922bd562ef64ea088113a422a135cb620258faccd98b2d560f3db85ea39" exitCode=0 Oct 02 14:38:00 crc kubenswrapper[4717]: I1002 14:38:00.387577 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" event={"ID":"f091692c-789e-45e1-aa38-a06ad59db093","Type":"ContainerDied","Data":"711a8922bd562ef64ea088113a422a135cb620258faccd98b2d560f3db85ea39"} Oct 02 14:38:00 crc kubenswrapper[4717]: I1002 14:38:00.415685 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"a1094debdb84ceb0381523e9c299361dd61f20e3eea17c58e9daa2317864ca7c"} Oct 02 14:38:00 crc kubenswrapper[4717]: I1002 14:38:00.415718 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"1b060cc2688798f758aff04b66d7d9c64985a1622ead38321831c91d4c3cf00e"} Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.451340 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"f83a49d417a7875375f0583fd84c1e922e439a32135a886acab2b81aa5723eeb"} Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.451410 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"d66b4ae9dbd51ad5260e43538cbe069831e60df5b10b55f8467d07e83b9ea914"} Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.451424 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"bc61124986845c47e488571ffc5b55d7472701a842452aeb554195c4daa1a832"} Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.451438 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"241a9c429dd4fd6f25daa0a7cb3787963a6f9f9da692714e2a7d4256d139b4c4"} Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.451450 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"a4258a20-8978-4f2a-bb99-793fe396938c","Type":"ContainerStarted","Data":"acc4e1efefd914a3e91b7726704a4b30c4ded00bd94921e1206fe4c8175ae628"} Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.484413 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-storage-0" podStartSLOduration=18.856064451 podStartE2EDuration="24.484391354s" podCreationTimestamp="2025-10-02 14:37:37 +0000 UTC" firstStartedPulling="2025-10-02 14:37:54.467111546 +0000 UTC m=+1025.318965992" lastFinishedPulling="2025-10-02 14:38:00.095438449 +0000 UTC m=+1030.947292895" observedRunningTime="2025-10-02 14:38:01.480871738 +0000 UTC 
m=+1032.332726204" watchObservedRunningTime="2025-10-02 14:38:01.484391354 +0000 UTC m=+1032.336245820" Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.788302 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.898406 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-util\") pod \"f091692c-789e-45e1-aa38-a06ad59db093\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.898477 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlkr9\" (UniqueName: \"kubernetes.io/projected/f091692c-789e-45e1-aa38-a06ad59db093-kube-api-access-qlkr9\") pod \"f091692c-789e-45e1-aa38-a06ad59db093\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.898792 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-bundle\") pod \"f091692c-789e-45e1-aa38-a06ad59db093\" (UID: \"f091692c-789e-45e1-aa38-a06ad59db093\") " Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.900566 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-bundle" (OuterVolumeSpecName: "bundle") pod "f091692c-789e-45e1-aa38-a06ad59db093" (UID: "f091692c-789e-45e1-aa38-a06ad59db093"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.904984 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f091692c-789e-45e1-aa38-a06ad59db093-kube-api-access-qlkr9" (OuterVolumeSpecName: "kube-api-access-qlkr9") pod "f091692c-789e-45e1-aa38-a06ad59db093" (UID: "f091692c-789e-45e1-aa38-a06ad59db093"). InnerVolumeSpecName "kube-api-access-qlkr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:38:01 crc kubenswrapper[4717]: I1002 14:38:01.912737 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-util" (OuterVolumeSpecName: "util") pod "f091692c-789e-45e1-aa38-a06ad59db093" (UID: "f091692c-789e-45e1-aa38-a06ad59db093"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:38:02 crc kubenswrapper[4717]: I1002 14:38:02.000911 4717 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:38:02 crc kubenswrapper[4717]: I1002 14:38:02.001312 4717 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f091692c-789e-45e1-aa38-a06ad59db093-util\") on node \"crc\" DevicePath \"\"" Oct 02 14:38:02 crc kubenswrapper[4717]: I1002 14:38:02.001327 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlkr9\" (UniqueName: \"kubernetes.io/projected/f091692c-789e-45e1-aa38-a06ad59db093-kube-api-access-qlkr9\") on node \"crc\" DevicePath \"\"" Oct 02 14:38:02 crc kubenswrapper[4717]: I1002 14:38:02.458981 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" event={"ID":"f091692c-789e-45e1-aa38-a06ad59db093","Type":"ContainerDied","Data":"a391530d949f3c7ba632bebcb46358d849dae8515ad22934abee6e4995749835"} Oct 02 14:38:02 crc kubenswrapper[4717]: I1002 14:38:02.459010 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8" Oct 02 14:38:02 crc kubenswrapper[4717]: I1002 14:38:02.459033 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a391530d949f3c7ba632bebcb46358d849dae8515ad22934abee6e4995749835" Oct 02 14:38:02 crc kubenswrapper[4717]: I1002 14:38:02.630897 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:38:02 crc kubenswrapper[4717]: I1002 14:38:02.632500 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/swift-proxy-59cb459c9f-krwjd" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.444864 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t"] Oct 02 14:38:16 crc kubenswrapper[4717]: E1002 14:38:16.446079 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f091692c-789e-45e1-aa38-a06ad59db093" containerName="util" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.446096 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="f091692c-789e-45e1-aa38-a06ad59db093" containerName="util" Oct 02 14:38:16 crc kubenswrapper[4717]: E1002 14:38:16.446122 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f091692c-789e-45e1-aa38-a06ad59db093" containerName="pull" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.446137 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="f091692c-789e-45e1-aa38-a06ad59db093" containerName="pull" Oct 02 14:38:16 crc kubenswrapper[4717]: E1002 14:38:16.446146 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f091692c-789e-45e1-aa38-a06ad59db093" containerName="extract" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.446153 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="f091692c-789e-45e1-aa38-a06ad59db093" containerName="extract" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.446322 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="f091692c-789e-45e1-aa38-a06ad59db093" containerName="extract" Oct 
02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.447262 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.450833 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-hsjbn" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.457705 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t"] Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.463453 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-service-cert" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.522003 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0b9d395-74ea-4b6e-9700-07e464512c7e-apiservice-cert\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.522045 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0b9d395-74ea-4b6e-9700-07e464512c7e-webhook-cert\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.522109 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g54s4\" (UniqueName: \"kubernetes.io/projected/e0b9d395-74ea-4b6e-9700-07e464512c7e-kube-api-access-g54s4\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.623364 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0b9d395-74ea-4b6e-9700-07e464512c7e-apiservice-cert\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.623606 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0b9d395-74ea-4b6e-9700-07e464512c7e-webhook-cert\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.623796 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g54s4\" (UniqueName: \"kubernetes.io/projected/e0b9d395-74ea-4b6e-9700-07e464512c7e-kube-api-access-g54s4\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " 
pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.629042 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0b9d395-74ea-4b6e-9700-07e464512c7e-webhook-cert\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.635662 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0b9d395-74ea-4b6e-9700-07e464512c7e-apiservice-cert\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.644117 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g54s4\" (UniqueName: \"kubernetes.io/projected/e0b9d395-74ea-4b6e-9700-07e464512c7e-kube-api-access-g54s4\") pod \"glance-operator-controller-manager-6677765f49-4gg6t\" (UID: \"e0b9d395-74ea-4b6e-9700-07e464512c7e\") " pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:16 crc kubenswrapper[4717]: I1002 14:38:16.768123 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:17 crc kubenswrapper[4717]: I1002 14:38:17.132192 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t"] Oct 02 14:38:17 crc kubenswrapper[4717]: W1002 14:38:17.139036 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0b9d395_74ea_4b6e_9700_07e464512c7e.slice/crio-49e77484703821c6a39d3536aba6b58cdc5465070f3f3c40571dfc7975a33b10 WatchSource:0}: Error finding container 49e77484703821c6a39d3536aba6b58cdc5465070f3f3c40571dfc7975a33b10: Status 404 returned error can't find the container with id 49e77484703821c6a39d3536aba6b58cdc5465070f3f3c40571dfc7975a33b10 Oct 02 14:38:17 crc kubenswrapper[4717]: I1002 14:38:17.571564 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" event={"ID":"e0b9d395-74ea-4b6e-9700-07e464512c7e","Type":"ContainerStarted","Data":"49e77484703821c6a39d3536aba6b58cdc5465070f3f3c40571dfc7975a33b10"} Oct 02 14:38:19 crc kubenswrapper[4717]: I1002 14:38:19.596210 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" event={"ID":"e0b9d395-74ea-4b6e-9700-07e464512c7e","Type":"ContainerStarted","Data":"5726d04a6a3d08e384895261f6362524d28590e98c44ba738489cc38a1ffaf54"} Oct 02 14:38:19 crc kubenswrapper[4717]: I1002 14:38:19.596797 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:19 crc kubenswrapper[4717]: I1002 14:38:19.596812 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" 
event={"ID":"e0b9d395-74ea-4b6e-9700-07e464512c7e","Type":"ContainerStarted","Data":"d7c0c2741899466c33b51510cfe724e1b2a581c31d7ebb50f3902326673b391d"} Oct 02 14:38:19 crc kubenswrapper[4717]: I1002 14:38:19.617771 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" podStartSLOduration=1.572015615 podStartE2EDuration="3.617748713s" podCreationTimestamp="2025-10-02 14:38:16 +0000 UTC" firstStartedPulling="2025-10-02 14:38:17.140158564 +0000 UTC m=+1047.992013010" lastFinishedPulling="2025-10-02 14:38:19.185891662 +0000 UTC m=+1050.037746108" observedRunningTime="2025-10-02 14:38:19.611923865 +0000 UTC m=+1050.463778351" watchObservedRunningTime="2025-10-02 14:38:19.617748713 +0000 UTC m=+1050.469603159" Oct 02 14:38:26 crc kubenswrapper[4717]: I1002 14:38:26.773882 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-6677765f49-4gg6t" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.300218 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.301438 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.303149 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"openstack-config-secret" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.303313 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts-9db6gc427h" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.303546 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.303740 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"default-dockercfg-dnbdj" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.318110 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.354504 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-qn6cv"] Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.355568 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-qn6cv" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.362427 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-qn6cv"] Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.444539 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-scripts\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.444835 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7zj6\" (UniqueName: \"kubernetes.io/projected/aacc45bd-80df-4155-b773-d7fae5f6aaa5-kube-api-access-q7zj6\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.445069 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.445242 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r86cn\" (UniqueName: \"kubernetes.io/projected/dd3c7e7f-b1a7-40fe-847e-7c01c213273e-kube-api-access-r86cn\") pod \"glance-db-create-qn6cv\" (UID: \"dd3c7e7f-b1a7-40fe-847e-7c01c213273e\") " pod="glance-kuttl-tests/glance-db-create-qn6cv" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.445393 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config-secret\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.546571 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-scripts\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.546623 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7zj6\" (UniqueName: \"kubernetes.io/projected/aacc45bd-80df-4155-b773-d7fae5f6aaa5-kube-api-access-q7zj6\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.546655 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.546688 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-r86cn\" (UniqueName: \"kubernetes.io/projected/dd3c7e7f-b1a7-40fe-847e-7c01c213273e-kube-api-access-r86cn\") pod \"glance-db-create-qn6cv\" (UID: \"dd3c7e7f-b1a7-40fe-847e-7c01c213273e\") " pod="glance-kuttl-tests/glance-db-create-qn6cv" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.546726 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config-secret\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.547671 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-scripts\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.547700 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.552004 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config-secret\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.563271 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r86cn\" (UniqueName: \"kubernetes.io/projected/dd3c7e7f-b1a7-40fe-847e-7c01c213273e-kube-api-access-r86cn\") pod \"glance-db-create-qn6cv\" (UID: \"dd3c7e7f-b1a7-40fe-847e-7c01c213273e\") " pod="glance-kuttl-tests/glance-db-create-qn6cv" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.569998 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7zj6\" (UniqueName: \"kubernetes.io/projected/aacc45bd-80df-4155-b773-d7fae5f6aaa5-kube-api-access-q7zj6\") pod \"openstackclient\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.617271 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 02 14:38:30 crc kubenswrapper[4717]: I1002 14:38:30.671347 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-qn6cv" Oct 02 14:38:31 crc kubenswrapper[4717]: I1002 14:38:31.040228 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:38:31 crc kubenswrapper[4717]: W1002 14:38:31.041792 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaacc45bd_80df_4155_b773_d7fae5f6aaa5.slice/crio-e1cb6fa95b7d18af7785133d9da0a25f059a6e3ddc24a7d0040aaf21f697e55a WatchSource:0}: Error finding container e1cb6fa95b7d18af7785133d9da0a25f059a6e3ddc24a7d0040aaf21f697e55a: Status 404 returned error can't find the container with id e1cb6fa95b7d18af7785133d9da0a25f059a6e3ddc24a7d0040aaf21f697e55a Oct 02 14:38:31 crc kubenswrapper[4717]: I1002 14:38:31.110397 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-qn6cv"] Oct 02 14:38:31 crc kubenswrapper[4717]: W1002 14:38:31.113833 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd3c7e7f_b1a7_40fe_847e_7c01c213273e.slice/crio-9f2303ea348ba2939d42b88ca6ed95772825f1552d9ec2c4dd247b02357cd6af WatchSource:0}: Error finding container 9f2303ea348ba2939d42b88ca6ed95772825f1552d9ec2c4dd247b02357cd6af: Status 404 returned error can't find the container with id 9f2303ea348ba2939d42b88ca6ed95772825f1552d9ec2c4dd247b02357cd6af Oct 02 14:38:31 crc kubenswrapper[4717]: I1002 14:38:31.691197 4717 generic.go:334] "Generic (PLEG): container finished" podID="dd3c7e7f-b1a7-40fe-847e-7c01c213273e" containerID="729ebb57bfea4200c89676478bd5eb660ffc69160fbe99a8d81e3c6e4c23487d" exitCode=0 Oct 02 14:38:31 crc kubenswrapper[4717]: I1002 14:38:31.691299 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-qn6cv" event={"ID":"dd3c7e7f-b1a7-40fe-847e-7c01c213273e","Type":"ContainerDied","Data":"729ebb57bfea4200c89676478bd5eb660ffc69160fbe99a8d81e3c6e4c23487d"} Oct 02 14:38:31 crc kubenswrapper[4717]: I1002 14:38:31.691738 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-qn6cv" event={"ID":"dd3c7e7f-b1a7-40fe-847e-7c01c213273e","Type":"ContainerStarted","Data":"9f2303ea348ba2939d42b88ca6ed95772825f1552d9ec2c4dd247b02357cd6af"} Oct 02 14:38:31 crc kubenswrapper[4717]: I1002 14:38:31.692915 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"aacc45bd-80df-4155-b773-d7fae5f6aaa5","Type":"ContainerStarted","Data":"e1cb6fa95b7d18af7785133d9da0a25f059a6e3ddc24a7d0040aaf21f697e55a"} Oct 02 14:38:32 crc kubenswrapper[4717]: I1002 14:38:32.978159 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-qn6cv" Oct 02 14:38:33 crc kubenswrapper[4717]: I1002 14:38:33.084250 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r86cn\" (UniqueName: \"kubernetes.io/projected/dd3c7e7f-b1a7-40fe-847e-7c01c213273e-kube-api-access-r86cn\") pod \"dd3c7e7f-b1a7-40fe-847e-7c01c213273e\" (UID: \"dd3c7e7f-b1a7-40fe-847e-7c01c213273e\") " Oct 02 14:38:33 crc kubenswrapper[4717]: I1002 14:38:33.102362 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd3c7e7f-b1a7-40fe-847e-7c01c213273e-kube-api-access-r86cn" (OuterVolumeSpecName: "kube-api-access-r86cn") pod "dd3c7e7f-b1a7-40fe-847e-7c01c213273e" (UID: "dd3c7e7f-b1a7-40fe-847e-7c01c213273e"). InnerVolumeSpecName "kube-api-access-r86cn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:38:33 crc kubenswrapper[4717]: I1002 14:38:33.186369 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r86cn\" (UniqueName: \"kubernetes.io/projected/dd3c7e7f-b1a7-40fe-847e-7c01c213273e-kube-api-access-r86cn\") on node \"crc\" DevicePath \"\"" Oct 02 14:38:33 crc kubenswrapper[4717]: I1002 14:38:33.708501 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-qn6cv" event={"ID":"dd3c7e7f-b1a7-40fe-847e-7c01c213273e","Type":"ContainerDied","Data":"9f2303ea348ba2939d42b88ca6ed95772825f1552d9ec2c4dd247b02357cd6af"} Oct 02 14:38:33 crc kubenswrapper[4717]: I1002 14:38:33.708538 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f2303ea348ba2939d42b88ca6ed95772825f1552d9ec2c4dd247b02357cd6af" Oct 02 14:38:33 crc kubenswrapper[4717]: I1002 14:38:33.708587 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-qn6cv" Oct 02 14:38:38 crc kubenswrapper[4717]: I1002 14:38:38.746789 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"aacc45bd-80df-4155-b773-d7fae5f6aaa5","Type":"ContainerStarted","Data":"f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f"} Oct 02 14:38:38 crc kubenswrapper[4717]: I1002 14:38:38.765058 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstackclient" podStartSLOduration=1.5363558899999998 podStartE2EDuration="8.765040434s" podCreationTimestamp="2025-10-02 14:38:30 +0000 UTC" firstStartedPulling="2025-10-02 14:38:31.044141397 +0000 UTC m=+1061.895995833" lastFinishedPulling="2025-10-02 14:38:38.272825931 +0000 UTC m=+1069.124680377" observedRunningTime="2025-10-02 14:38:38.762405032 +0000 UTC m=+1069.614259488" watchObservedRunningTime="2025-10-02 14:38:38.765040434 +0000 UTC m=+1069.616894900" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.332376 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-6163-account-create-fww5s"] Oct 02 14:38:40 crc kubenswrapper[4717]: E1002 14:38:40.333031 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3c7e7f-b1a7-40fe-847e-7c01c213273e" containerName="mariadb-database-create" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.333046 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3c7e7f-b1a7-40fe-847e-7c01c213273e" containerName="mariadb-database-create" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.333218 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd3c7e7f-b1a7-40fe-847e-7c01c213273e" containerName="mariadb-database-create" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.333724 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-6163-account-create-fww5s" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.337013 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.353011 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-6163-account-create-fww5s"] Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.397979 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6qlk\" (UniqueName: \"kubernetes.io/projected/7ea11fb5-41b6-47b6-9143-979e994ab8ab-kube-api-access-t6qlk\") pod \"glance-6163-account-create-fww5s\" (UID: \"7ea11fb5-41b6-47b6-9143-979e994ab8ab\") " pod="glance-kuttl-tests/glance-6163-account-create-fww5s" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.499884 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6qlk\" (UniqueName: \"kubernetes.io/projected/7ea11fb5-41b6-47b6-9143-979e994ab8ab-kube-api-access-t6qlk\") pod \"glance-6163-account-create-fww5s\" (UID: \"7ea11fb5-41b6-47b6-9143-979e994ab8ab\") " pod="glance-kuttl-tests/glance-6163-account-create-fww5s" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.521788 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6qlk\" (UniqueName: \"kubernetes.io/projected/7ea11fb5-41b6-47b6-9143-979e994ab8ab-kube-api-access-t6qlk\") pod \"glance-6163-account-create-fww5s\" (UID: \"7ea11fb5-41b6-47b6-9143-979e994ab8ab\") " pod="glance-kuttl-tests/glance-6163-account-create-fww5s" Oct 02 14:38:40 crc kubenswrapper[4717]: I1002 14:38:40.687228 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-6163-account-create-fww5s" Oct 02 14:38:41 crc kubenswrapper[4717]: I1002 14:38:41.096185 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-6163-account-create-fww5s"] Oct 02 14:38:41 crc kubenswrapper[4717]: I1002 14:38:41.775595 4717 generic.go:334] "Generic (PLEG): container finished" podID="7ea11fb5-41b6-47b6-9143-979e994ab8ab" containerID="f0574f484e5443d13f88806c09fe4e00f2fd7cb8002a3e0915423008edd1a964" exitCode=0 Oct 02 14:38:41 crc kubenswrapper[4717]: I1002 14:38:41.775658 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-6163-account-create-fww5s" event={"ID":"7ea11fb5-41b6-47b6-9143-979e994ab8ab","Type":"ContainerDied","Data":"f0574f484e5443d13f88806c09fe4e00f2fd7cb8002a3e0915423008edd1a964"} Oct 02 14:38:41 crc kubenswrapper[4717]: I1002 14:38:41.776054 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-6163-account-create-fww5s" event={"ID":"7ea11fb5-41b6-47b6-9143-979e994ab8ab","Type":"ContainerStarted","Data":"47ec6ed6258716ab50cdb08de83b4188a65c8c50e8688e17346523c038122766"} Oct 02 14:38:43 crc kubenswrapper[4717]: I1002 14:38:43.078334 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-6163-account-create-fww5s" Oct 02 14:38:43 crc kubenswrapper[4717]: I1002 14:38:43.140956 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6qlk\" (UniqueName: \"kubernetes.io/projected/7ea11fb5-41b6-47b6-9143-979e994ab8ab-kube-api-access-t6qlk\") pod \"7ea11fb5-41b6-47b6-9143-979e994ab8ab\" (UID: \"7ea11fb5-41b6-47b6-9143-979e994ab8ab\") " Oct 02 14:38:43 crc kubenswrapper[4717]: I1002 14:38:43.148349 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ea11fb5-41b6-47b6-9143-979e994ab8ab-kube-api-access-t6qlk" (OuterVolumeSpecName: "kube-api-access-t6qlk") pod "7ea11fb5-41b6-47b6-9143-979e994ab8ab" (UID: "7ea11fb5-41b6-47b6-9143-979e994ab8ab"). InnerVolumeSpecName "kube-api-access-t6qlk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:38:43 crc kubenswrapper[4717]: I1002 14:38:43.242225 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6qlk\" (UniqueName: \"kubernetes.io/projected/7ea11fb5-41b6-47b6-9143-979e994ab8ab-kube-api-access-t6qlk\") on node \"crc\" DevicePath \"\"" Oct 02 14:38:43 crc kubenswrapper[4717]: I1002 14:38:43.791755 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-6163-account-create-fww5s" event={"ID":"7ea11fb5-41b6-47b6-9143-979e994ab8ab","Type":"ContainerDied","Data":"47ec6ed6258716ab50cdb08de83b4188a65c8c50e8688e17346523c038122766"} Oct 02 14:38:43 crc kubenswrapper[4717]: I1002 14:38:43.791796 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47ec6ed6258716ab50cdb08de83b4188a65c8c50e8688e17346523c038122766" Oct 02 14:38:43 crc kubenswrapper[4717]: I1002 14:38:43.792342 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-6163-account-create-fww5s" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.420277 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-8n8js"] Oct 02 14:38:45 crc kubenswrapper[4717]: E1002 14:38:45.421514 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ea11fb5-41b6-47b6-9143-979e994ab8ab" containerName="mariadb-account-create" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.421545 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ea11fb5-41b6-47b6-9143-979e994ab8ab" containerName="mariadb-account-create" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.421855 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ea11fb5-41b6-47b6-9143-979e994ab8ab" containerName="mariadb-account-create" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.423010 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.425200 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-7pm5h" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.426108 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.430562 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-8n8js"] Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.476314 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-db-sync-config-data\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.476403 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc922\" (UniqueName: \"kubernetes.io/projected/28f02d25-7483-4463-953d-c83788015501-kube-api-access-gc922\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.476457 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-config-data\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.578445 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-db-sync-config-data\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.578582 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc922\" (UniqueName: \"kubernetes.io/projected/28f02d25-7483-4463-953d-c83788015501-kube-api-access-gc922\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.578653 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-config-data\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.584252 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-db-sync-config-data\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.584651 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-config-data\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.599382 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc922\" (UniqueName: \"kubernetes.io/projected/28f02d25-7483-4463-953d-c83788015501-kube-api-access-gc922\") pod \"glance-db-sync-8n8js\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.752563 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:38:45 crc kubenswrapper[4717]: I1002 14:38:45.994393 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-8n8js"] Oct 02 14:38:46 crc kubenswrapper[4717]: I1002 14:38:46.824818 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-8n8js" event={"ID":"28f02d25-7483-4463-953d-c83788015501","Type":"ContainerStarted","Data":"262db7da5703b59e26847a217d7643d2594fab0aec367392bbe0fbb930fc6065"} Oct 02 14:38:48 crc kubenswrapper[4717]: I1002 14:38:48.619915 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:38:48 crc kubenswrapper[4717]: I1002 14:38:48.620318 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:39:01 crc kubenswrapper[4717]: I1002 14:39:01.972889 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-8n8js" event={"ID":"28f02d25-7483-4463-953d-c83788015501","Type":"ContainerStarted","Data":"cd64fbe8e2891ccf40265ed0a3175e6f97ce04529e678e0961d993a11d7fc1ed"} Oct 02 14:39:01 crc kubenswrapper[4717]: I1002 14:39:01.992172 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-8n8js" podStartSLOduration=2.28046237 podStartE2EDuration="16.992148517s" podCreationTimestamp="2025-10-02 14:38:45 +0000 UTC" firstStartedPulling="2025-10-02 14:38:45.993921674 +0000 UTC m=+1076.845776140" lastFinishedPulling="2025-10-02 14:39:00.705607831 +0000 UTC m=+1091.557462287" observedRunningTime="2025-10-02 14:39:01.991081568 +0000 UTC m=+1092.842936034" watchObservedRunningTime="2025-10-02 14:39:01.992148517 +0000 UTC m=+1092.844002973" Oct 02 14:39:08 crc kubenswrapper[4717]: I1002 14:39:08.018587 4717 generic.go:334] "Generic (PLEG): container finished" podID="28f02d25-7483-4463-953d-c83788015501" containerID="cd64fbe8e2891ccf40265ed0a3175e6f97ce04529e678e0961d993a11d7fc1ed" exitCode=0 Oct 02 14:39:08 crc kubenswrapper[4717]: I1002 14:39:08.018746 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-8n8js" event={"ID":"28f02d25-7483-4463-953d-c83788015501","Type":"ContainerDied","Data":"cd64fbe8e2891ccf40265ed0a3175e6f97ce04529e678e0961d993a11d7fc1ed"} Oct 02 14:39:09 crc kubenswrapper[4717]: 
I1002 14:39:09.319459 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.458660 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-config-data\") pod \"28f02d25-7483-4463-953d-c83788015501\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.458731 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-db-sync-config-data\") pod \"28f02d25-7483-4463-953d-c83788015501\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.458763 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc922\" (UniqueName: \"kubernetes.io/projected/28f02d25-7483-4463-953d-c83788015501-kube-api-access-gc922\") pod \"28f02d25-7483-4463-953d-c83788015501\" (UID: \"28f02d25-7483-4463-953d-c83788015501\") " Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.464655 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28f02d25-7483-4463-953d-c83788015501-kube-api-access-gc922" (OuterVolumeSpecName: "kube-api-access-gc922") pod "28f02d25-7483-4463-953d-c83788015501" (UID: "28f02d25-7483-4463-953d-c83788015501"). InnerVolumeSpecName "kube-api-access-gc922". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.466970 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "28f02d25-7483-4463-953d-c83788015501" (UID: "28f02d25-7483-4463-953d-c83788015501"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.505603 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-config-data" (OuterVolumeSpecName: "config-data") pod "28f02d25-7483-4463-953d-c83788015501" (UID: "28f02d25-7483-4463-953d-c83788015501"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.561004 4717 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.561052 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc922\" (UniqueName: \"kubernetes.io/projected/28f02d25-7483-4463-953d-c83788015501-kube-api-access-gc922\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:09 crc kubenswrapper[4717]: I1002 14:39:09.561076 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28f02d25-7483-4463-953d-c83788015501-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:10 crc kubenswrapper[4717]: I1002 14:39:10.041007 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-8n8js" event={"ID":"28f02d25-7483-4463-953d-c83788015501","Type":"ContainerDied","Data":"262db7da5703b59e26847a217d7643d2594fab0aec367392bbe0fbb930fc6065"} Oct 02 14:39:10 crc kubenswrapper[4717]: I1002 14:39:10.041742 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="262db7da5703b59e26847a217d7643d2594fab0aec367392bbe0fbb930fc6065" Oct 02 14:39:10 crc kubenswrapper[4717]: I1002 14:39:10.041106 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-8n8js" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.577772 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:11 crc kubenswrapper[4717]: E1002 14:39:11.578382 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28f02d25-7483-4463-953d-c83788015501" containerName="glance-db-sync" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.578401 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="28f02d25-7483-4463-953d-c83788015501" containerName="glance-db-sync" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.578599 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="28f02d25-7483-4463-953d-c83788015501" containerName="glance-db-sync" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.579641 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.582376 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.585827 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.586109 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-7pm5h" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.586107 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.596326 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.597803 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.603655 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695156 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-lib-modules\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695204 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695219 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-nvme\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695237 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-config-data\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695258 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-sys\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695322 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-httpd-run\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695362 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695434 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6bv2\" (UniqueName: \"kubernetes.io/projected/841997d6-cc3a-408a-aaab-511e018a2063-kube-api-access-g6bv2\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695475 4717 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695507 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-run\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695554 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-lib-modules\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695571 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-logs\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695592 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-config-data\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695614 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-scripts\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695656 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-logs\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695680 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-dev\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695697 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-nvme\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695716 4717 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695740 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-scripts\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695761 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhz7f\" (UniqueName: \"kubernetes.io/projected/ad942cdd-0362-4286-a539-2aa17a0db2d6-kube-api-access-zhz7f\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695794 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-dev\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695817 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695869 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695894 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695916 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-sys\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695954 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-run\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.695970 4717 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-httpd-run\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.696010 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.777272 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:11 crc kubenswrapper[4717]: E1002 14:39:11.777787 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-data dev etc-iscsi etc-nvme glance glance-cache httpd-run kube-api-access-g6bv2 lib-modules logs run scripts sys var-locks-brick], unattached volumes=[], failed to process volumes=[]: context canceled" pod="glance-kuttl-tests/glance-default-single-1" podUID="841997d6-cc3a-408a-aaab-511e018a2063" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797686 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-logs\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797739 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-dev\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797770 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-nvme\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797792 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797818 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-scripts\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797838 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhz7f\" (UniqueName: \"kubernetes.io/projected/ad942cdd-0362-4286-a539-2aa17a0db2d6-kube-api-access-zhz7f\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " 
pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797863 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-dev\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797882 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797906 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797920 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797945 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-nvme\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797990 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-sys\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797998 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-dev\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798031 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-dev\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.797960 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-sys\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798076 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: 
\"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-run\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798386 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-httpd-run\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798406 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798450 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798477 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-lib-modules\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798274 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-run\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798446 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-lib-modules\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798249 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") device mount path \"/mnt/openstack/pv06\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798226 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") device mount path \"/mnt/openstack/pv03\"" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798516 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-iscsi\") pod \"glance-default-single-0\" (UID: 
\"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798263 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-logs\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798453 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798610 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798627 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-nvme\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798649 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-config-data\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798691 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-sys\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798708 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-httpd-run\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798738 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798751 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-nvme\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798788 4717 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-g6bv2\" (UniqueName: \"kubernetes.io/projected/841997d6-cc3a-408a-aaab-511e018a2063-kube-api-access-g6bv2\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798793 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-httpd-run\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798812 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-sys\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798829 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798858 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-run\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798880 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-lib-modules\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798882 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") device mount path \"/mnt/openstack/pv04\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.799140 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-httpd-run\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.799219 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") device mount path \"/mnt/openstack/pv10\"" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.798790 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.799579 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-run\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.799677 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-lib-modules\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.799734 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-logs\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.799787 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-config-data\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.799842 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-scripts\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.802360 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-logs\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.806315 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-config-data\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.818596 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-scripts\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.819401 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhz7f\" (UniqueName: \"kubernetes.io/projected/ad942cdd-0362-4286-a539-2aa17a0db2d6-kube-api-access-zhz7f\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 
14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.819603 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-config-data\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.836743 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-scripts\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.837997 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.838065 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.838123 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.838500 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-0\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.839576 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6bv2\" (UniqueName: \"kubernetes.io/projected/841997d6-cc3a-408a-aaab-511e018a2063-kube-api-access-g6bv2\") pod \"glance-default-single-1\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:11 crc kubenswrapper[4717]: I1002 14:39:11.897198 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.053692 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.074000 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103497 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-dev\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103546 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-nvme\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103576 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-config-data\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103617 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103673 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103692 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6bv2\" (UniqueName: \"kubernetes.io/projected/841997d6-cc3a-408a-aaab-511e018a2063-kube-api-access-g6bv2\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103711 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-scripts\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103734 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-iscsi\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103760 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-logs\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103781 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-sys\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103827 4717 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-var-locks-brick\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103846 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-httpd-run\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103868 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-lib-modules\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.103883 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-run\") pod \"841997d6-cc3a-408a-aaab-511e018a2063\" (UID: \"841997d6-cc3a-408a-aaab-511e018a2063\") " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.104279 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-run" (OuterVolumeSpecName: "run") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.104315 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.104686 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-logs" (OuterVolumeSpecName: "logs") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.104711 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-sys" (OuterVolumeSpecName: "sys") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.104727 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "var-locks-brick". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.104894 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.104966 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.105028 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-dev" (OuterVolumeSpecName: "dev") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.107091 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.111817 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/841997d6-cc3a-408a-aaab-511e018a2063-kube-api-access-g6bv2" (OuterVolumeSpecName: "kube-api-access-g6bv2") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "kube-api-access-g6bv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.123025 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance-cache") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.133679 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.133758 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-scripts" (OuterVolumeSpecName: "scripts") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.133848 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-config-data" (OuterVolumeSpecName: "config-data") pod "841997d6-cc3a-408a-aaab-511e018a2063" (UID: "841997d6-cc3a-408a-aaab-511e018a2063"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.166706 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:12 crc kubenswrapper[4717]: W1002 14:39:12.180392 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad942cdd_0362_4286_a539_2aa17a0db2d6.slice/crio-704d17bd10671c184b222bfebe5799193b99c93872080c2cf2f17d010511bf5a WatchSource:0}: Error finding container 704d17bd10671c184b222bfebe5799193b99c93872080c2cf2f17d010511bf5a: Status 404 returned error can't find the container with id 704d17bd10671c184b222bfebe5799193b99c93872080c2cf2f17d010511bf5a Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206491 4717 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-logs\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206523 4717 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-sys\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206536 4717 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206545 4717 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/841997d6-cc3a-408a-aaab-511e018a2063-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206556 4717 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206564 4717 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206573 4717 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-dev\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206582 4717 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206590 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206625 4717 
reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206639 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206648 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6bv2\" (UniqueName: \"kubernetes.io/projected/841997d6-cc3a-408a-aaab-511e018a2063-kube-api-access-g6bv2\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206657 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841997d6-cc3a-408a-aaab-511e018a2063-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.206666 4717 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/841997d6-cc3a-408a-aaab-511e018a2063-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.222763 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.230466 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.308716 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:12 crc kubenswrapper[4717]: I1002 14:39:12.308752 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.064844 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.064876 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ad942cdd-0362-4286-a539-2aa17a0db2d6","Type":"ContainerStarted","Data":"ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42"} Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.065976 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ad942cdd-0362-4286-a539-2aa17a0db2d6","Type":"ContainerStarted","Data":"11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938"} Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.066017 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ad942cdd-0362-4286-a539-2aa17a0db2d6","Type":"ContainerStarted","Data":"704d17bd10671c184b222bfebe5799193b99c93872080c2cf2f17d010511bf5a"} Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.143489 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=3.1434678209999998 podStartE2EDuration="3.143467821s" podCreationTimestamp="2025-10-02 14:39:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:39:13.110495787 +0000 UTC m=+1103.962350263" watchObservedRunningTime="2025-10-02 14:39:13.143467821 +0000 UTC m=+1103.995322307" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.167143 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.170953 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.206569 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.208041 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.228245 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.238344 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339517 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-config-data\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339567 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-dev\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339596 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339673 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339706 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-scripts\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339744 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z76h\" (UniqueName: \"kubernetes.io/projected/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-kube-api-access-7z76h\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339777 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-sys\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339816 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" 
(UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-nvme\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339836 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-run\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339866 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339892 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-logs\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.339984 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.340004 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-lib-modules\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.340024 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-httpd-run\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.340106 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") device mount path \"/mnt/openstack/pv10\"" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.362204 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441188 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-logs\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441458 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441478 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-lib-modules\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441494 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-httpd-run\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441526 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-config-data\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441541 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-dev\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441562 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441562 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-lib-modules\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441582 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-scripts\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441623 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z76h\" (UniqueName: \"kubernetes.io/projected/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-kube-api-access-7z76h\") pod \"glance-default-single-1\" (UID: 
\"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441647 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-sys\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441676 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-nvme\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441707 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-run\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441738 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441905 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") device mount path \"/mnt/openstack/pv03\"" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441914 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-logs\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.442014 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-sys\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.441537 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.442146 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-dev\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.442207 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-nvme\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.442248 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.442274 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-run\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.442572 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-httpd-run\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.445246 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-scripts\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.447041 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-config-data\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.458592 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.462148 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z76h\" (UniqueName: \"kubernetes.io/projected/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-kube-api-access-7z76h\") pod \"glance-default-single-1\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:13 crc kubenswrapper[4717]: I1002 14:39:13.549093 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:14 crc kubenswrapper[4717]: I1002 14:39:14.002129 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:14 crc kubenswrapper[4717]: I1002 14:39:14.071740 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c","Type":"ContainerStarted","Data":"3751720a89ba1c064a18da729ba5545d236e5d2b3f3bf2ff5c8ab0aae04b421c"} Oct 02 14:39:14 crc kubenswrapper[4717]: I1002 14:39:14.847190 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="841997d6-cc3a-408a-aaab-511e018a2063" path="/var/lib/kubelet/pods/841997d6-cc3a-408a-aaab-511e018a2063/volumes" Oct 02 14:39:15 crc kubenswrapper[4717]: I1002 14:39:15.084926 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c","Type":"ContainerStarted","Data":"878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563"} Oct 02 14:39:15 crc kubenswrapper[4717]: I1002 14:39:15.086198 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c","Type":"ContainerStarted","Data":"f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e"} Oct 02 14:39:15 crc kubenswrapper[4717]: I1002 14:39:15.116037 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-1" podStartSLOduration=2.116007391 podStartE2EDuration="2.116007391s" podCreationTimestamp="2025-10-02 14:39:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:39:15.115253292 +0000 UTC m=+1105.967107738" watchObservedRunningTime="2025-10-02 14:39:15.116007391 +0000 UTC m=+1105.967861837" Oct 02 14:39:18 crc kubenswrapper[4717]: I1002 14:39:18.620260 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:39:18 crc kubenswrapper[4717]: I1002 14:39:18.621996 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:39:21 crc kubenswrapper[4717]: I1002 14:39:21.897622 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:21 crc kubenswrapper[4717]: I1002 14:39:21.898329 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:21 crc kubenswrapper[4717]: I1002 14:39:21.929752 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:21 crc kubenswrapper[4717]: I1002 14:39:21.942073 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:22 crc 
kubenswrapper[4717]: I1002 14:39:22.145696 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:22 crc kubenswrapper[4717]: I1002 14:39:22.145751 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:23 crc kubenswrapper[4717]: I1002 14:39:23.549730 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:23 crc kubenswrapper[4717]: I1002 14:39:23.549819 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:23 crc kubenswrapper[4717]: I1002 14:39:23.578525 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:23 crc kubenswrapper[4717]: I1002 14:39:23.588591 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:24 crc kubenswrapper[4717]: I1002 14:39:24.162026 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:24 crc kubenswrapper[4717]: I1002 14:39:24.162057 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:24 crc kubenswrapper[4717]: I1002 14:39:24.535620 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:24 crc kubenswrapper[4717]: I1002 14:39:24.536020 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:39:24 crc kubenswrapper[4717]: I1002 14:39:24.539598 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:26 crc kubenswrapper[4717]: I1002 14:39:26.187492 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:39:26 crc kubenswrapper[4717]: I1002 14:39:26.187825 4717 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 02 14:39:26 crc kubenswrapper[4717]: I1002 14:39:26.268602 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:26 crc kubenswrapper[4717]: I1002 14:39:26.272460 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:39:26 crc kubenswrapper[4717]: I1002 14:39:26.342099 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:26 crc kubenswrapper[4717]: I1002 14:39:26.342353 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerName="glance-log" containerID="cri-o://11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938" gracePeriod=30 Oct 02 14:39:26 crc kubenswrapper[4717]: I1002 14:39:26.342449 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerName="glance-httpd" containerID="cri-o://ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42" gracePeriod=30 Oct 02 14:39:27 crc 
kubenswrapper[4717]: I1002 14:39:27.196531 4717 generic.go:334] "Generic (PLEG): container finished" podID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerID="11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938" exitCode=143 Oct 02 14:39:27 crc kubenswrapper[4717]: I1002 14:39:27.196643 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ad942cdd-0362-4286-a539-2aa17a0db2d6","Type":"ContainerDied","Data":"11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938"} Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.868195 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905444 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-dev\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905534 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-nvme\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905572 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-iscsi\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905635 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-httpd-run\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905658 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-config-data\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905688 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905719 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-lib-modules\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905751 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905796 4717 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-var-locks-brick\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905828 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-scripts\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905859 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-sys\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905882 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-run\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905957 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhz7f\" (UniqueName: \"kubernetes.io/projected/ad942cdd-0362-4286-a539-2aa17a0db2d6-kube-api-access-zhz7f\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.905994 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-logs\") pod \"ad942cdd-0362-4286-a539-2aa17a0db2d6\" (UID: \"ad942cdd-0362-4286-a539-2aa17a0db2d6\") " Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.907098 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-logs" (OuterVolumeSpecName: "logs") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.907151 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-dev" (OuterVolumeSpecName: "dev") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.907177 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.907197 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). 
InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.907460 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.908054 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.908165 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-run" (OuterVolumeSpecName: "run") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.908164 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-sys" (OuterVolumeSpecName: "sys") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.908243 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.913721 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad942cdd-0362-4286-a539-2aa17a0db2d6-kube-api-access-zhz7f" (OuterVolumeSpecName: "kube-api-access-zhz7f") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "kube-api-access-zhz7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.915410 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance-cache") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.916340 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "local-storage06-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.919798 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-scripts" (OuterVolumeSpecName: "scripts") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:39:29 crc kubenswrapper[4717]: I1002 14:39:29.955045 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-config-data" (OuterVolumeSpecName: "config-data") pod "ad942cdd-0362-4286-a539-2aa17a0db2d6" (UID: "ad942cdd-0362-4286-a539-2aa17a0db2d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007269 4717 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007330 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007343 4717 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007355 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007367 4717 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-sys\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007377 4717 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007389 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhz7f\" (UniqueName: \"kubernetes.io/projected/ad942cdd-0362-4286-a539-2aa17a0db2d6-kube-api-access-zhz7f\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007401 4717 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-logs\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007411 4717 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-dev\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007421 4717 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007432 4717 reconciler_common.go:293] 
"Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ad942cdd-0362-4286-a539-2aa17a0db2d6-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007442 4717 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad942cdd-0362-4286-a539-2aa17a0db2d6-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007452 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad942cdd-0362-4286-a539-2aa17a0db2d6-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.007468 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.030187 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.040869 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.110026 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.110094 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.226473 4717 generic.go:334] "Generic (PLEG): container finished" podID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerID="ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42" exitCode=0 Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.226545 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ad942cdd-0362-4286-a539-2aa17a0db2d6","Type":"ContainerDied","Data":"ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42"} Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.226583 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.226616 4717 scope.go:117] "RemoveContainer" containerID="ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.226600 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ad942cdd-0362-4286-a539-2aa17a0db2d6","Type":"ContainerDied","Data":"704d17bd10671c184b222bfebe5799193b99c93872080c2cf2f17d010511bf5a"} Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.258829 4717 scope.go:117] "RemoveContainer" containerID="11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.266110 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.281530 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.283851 4717 scope.go:117] "RemoveContainer" containerID="ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42" Oct 02 14:39:30 crc kubenswrapper[4717]: E1002 14:39:30.288391 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42\": container with ID starting with ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42 not found: ID does not exist" containerID="ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.288442 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42"} err="failed to get container status \"ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42\": rpc error: code = NotFound desc = could not find container \"ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42\": container with ID starting with ddad9701868280bcc143f9185d6b390d708323bc04b478a93f3757cdcada5e42 not found: ID does not exist" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.288477 4717 scope.go:117] "RemoveContainer" containerID="11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938" Oct 02 14:39:30 crc kubenswrapper[4717]: E1002 14:39:30.288881 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938\": container with ID starting with 11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938 not found: ID does not exist" containerID="11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.288919 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938"} err="failed to get container status \"11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938\": rpc error: code = NotFound desc = could not find container \"11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938\": container with ID starting with 11b641b3bad5cb14eda23507b599741fb55dd199b5c0057c93d22bc21e0b3938 not found: ID does not 
exist" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.308296 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:30 crc kubenswrapper[4717]: E1002 14:39:30.308646 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerName="glance-httpd" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.308670 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerName="glance-httpd" Oct 02 14:39:30 crc kubenswrapper[4717]: E1002 14:39:30.308705 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerName="glance-log" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.308715 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerName="glance-log" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.308858 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerName="glance-httpd" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.308874 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" containerName="glance-log" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.309753 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.327545 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.414142 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.516050 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-dev\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.516378 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.516480 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk8g7\" (UniqueName: \"kubernetes.io/projected/889101bf-86a3-4d78-b1e9-328567d4cc7e-kube-api-access-jk8g7\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.516594 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-scripts\") pod 
\"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.516682 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-sys\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.516760 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-logs\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.516825 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-httpd-run\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.516899 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.517159 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-config-data\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.517303 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.517366 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-lib-modules\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.517452 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-nvme\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.517503 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-var-locks-brick\") pod 
\"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.517559 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-run\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.518157 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") device mount path \"/mnt/openstack/pv06\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.537975 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.619817 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk8g7\" (UniqueName: \"kubernetes.io/projected/889101bf-86a3-4d78-b1e9-328567d4cc7e-kube-api-access-jk8g7\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.620553 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-scripts\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.620657 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-sys\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.620734 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-logs\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.620798 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-httpd-run\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.620898 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc 
kubenswrapper[4717]: I1002 14:39:30.620988 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-config-data\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621087 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-lib-modules\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621180 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-nvme\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621243 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-logs\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621251 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621307 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-run\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621454 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-dev\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621481 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-httpd-run\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621333 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621396 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: 
\"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621397 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-nvme\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621406 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-run\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621374 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-lib-modules\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621518 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-dev\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.620902 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-sys\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621599 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.621730 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") device mount path \"/mnt/openstack/pv04\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.626436 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-scripts\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.627522 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-config-data\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc 
kubenswrapper[4717]: I1002 14:39:30.640616 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.645740 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk8g7\" (UniqueName: \"kubernetes.io/projected/889101bf-86a3-4d78-b1e9-328567d4cc7e-kube-api-access-jk8g7\") pod \"glance-default-single-0\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.847258 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad942cdd-0362-4286-a539-2aa17a0db2d6" path="/var/lib/kubelet/pods/ad942cdd-0362-4286-a539-2aa17a0db2d6/volumes" Oct 02 14:39:30 crc kubenswrapper[4717]: I1002 14:39:30.924335 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:31 crc kubenswrapper[4717]: I1002 14:39:31.150318 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:31 crc kubenswrapper[4717]: W1002 14:39:31.154718 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod889101bf_86a3_4d78_b1e9_328567d4cc7e.slice/crio-9718ecddab549f64f6d0becb586a3a2a6cc602d8d4314468b8bddd034d92291e WatchSource:0}: Error finding container 9718ecddab549f64f6d0becb586a3a2a6cc602d8d4314468b8bddd034d92291e: Status 404 returned error can't find the container with id 9718ecddab549f64f6d0becb586a3a2a6cc602d8d4314468b8bddd034d92291e Oct 02 14:39:31 crc kubenswrapper[4717]: I1002 14:39:31.234954 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"889101bf-86a3-4d78-b1e9-328567d4cc7e","Type":"ContainerStarted","Data":"9718ecddab549f64f6d0becb586a3a2a6cc602d8d4314468b8bddd034d92291e"} Oct 02 14:39:32 crc kubenswrapper[4717]: I1002 14:39:32.244679 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"889101bf-86a3-4d78-b1e9-328567d4cc7e","Type":"ContainerStarted","Data":"336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527"} Oct 02 14:39:32 crc kubenswrapper[4717]: I1002 14:39:32.245755 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"889101bf-86a3-4d78-b1e9-328567d4cc7e","Type":"ContainerStarted","Data":"42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e"} Oct 02 14:39:32 crc kubenswrapper[4717]: I1002 14:39:32.266202 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=2.266184854 podStartE2EDuration="2.266184854s" podCreationTimestamp="2025-10-02 14:39:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:39:32.264135909 +0000 UTC m=+1123.115990355" watchObservedRunningTime="2025-10-02 14:39:32.266184854 +0000 UTC m=+1123.118039290" Oct 02 14:39:40 crc kubenswrapper[4717]: I1002 14:39:40.924885 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:40 crc kubenswrapper[4717]: I1002 14:39:40.926041 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:40 crc kubenswrapper[4717]: I1002 14:39:40.953604 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:40 crc kubenswrapper[4717]: I1002 14:39:40.983399 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:41 crc kubenswrapper[4717]: I1002 14:39:41.328651 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:41 crc kubenswrapper[4717]: I1002 14:39:41.328713 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:43 crc kubenswrapper[4717]: I1002 14:39:43.199201 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:43 crc kubenswrapper[4717]: I1002 14:39:43.286783 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:39:48 crc kubenswrapper[4717]: I1002 14:39:48.619867 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:39:48 crc kubenswrapper[4717]: I1002 14:39:48.620361 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:39:48 crc kubenswrapper[4717]: I1002 14:39:48.620404 4717 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:39:48 crc kubenswrapper[4717]: I1002 14:39:48.621010 4717 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"52384e0f02272cfcc1d37cbacff5ecff9bba1bac6264b24fc5eae60641b49d30"} pod="openshift-machine-config-operator/machine-config-daemon-sk55f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 14:39:48 crc kubenswrapper[4717]: I1002 14:39:48.621061 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" containerID="cri-o://52384e0f02272cfcc1d37cbacff5ecff9bba1bac6264b24fc5eae60641b49d30" gracePeriod=600 Oct 02 14:39:49 crc kubenswrapper[4717]: I1002 14:39:49.394048 4717 generic.go:334] "Generic (PLEG): container finished" podID="405aba30-0ff3-4fca-a5da-09c35263665d" containerID="52384e0f02272cfcc1d37cbacff5ecff9bba1bac6264b24fc5eae60641b49d30" exitCode=0 Oct 02 14:39:49 crc kubenswrapper[4717]: I1002 14:39:49.394146 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerDied","Data":"52384e0f02272cfcc1d37cbacff5ecff9bba1bac6264b24fc5eae60641b49d30"} Oct 02 14:39:49 crc kubenswrapper[4717]: I1002 14:39:49.394487 4717 scope.go:117] "RemoveContainer" containerID="dc26c4013ef3dfdebc8448f602fd80cfb07a36d1bd8014fcfc1f4769626fbe3f" Oct 02 14:39:50 crc kubenswrapper[4717]: I1002 14:39:50.406708 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"f4210ad73a8a6d0d88db04fcb399f0863d41d8468e02867425cd0ab6020cf084"} Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.499427 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-8n8js"] Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.507244 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-sync-8n8js"] Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.568061 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance6163-account-delete-ds6dr"] Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.569328 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.600650 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance6163-account-delete-ds6dr"] Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.605179 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zl7n\" (UniqueName: \"kubernetes.io/projected/64187993-c038-4e9b-b7a1-a3a700f51383-kube-api-access-7zl7n\") pod \"glance6163-account-delete-ds6dr\" (UID: \"64187993-c038-4e9b-b7a1-a3a700f51383\") " pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.650012 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.650303 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-log" containerID="cri-o://42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e" gracePeriod=30 Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.650702 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-httpd" containerID="cri-o://336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527" gracePeriod=30 Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.655236 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.655489 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerName="glance-log" containerID="cri-o://f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e" gracePeriod=30 Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.655644 4717 
kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerName="glance-httpd" containerID="cri-o://878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563" gracePeriod=30 Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.706922 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zl7n\" (UniqueName: \"kubernetes.io/projected/64187993-c038-4e9b-b7a1-a3a700f51383-kube-api-access-7zl7n\") pod \"glance6163-account-delete-ds6dr\" (UID: \"64187993-c038-4e9b-b7a1-a3a700f51383\") " pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.728471 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zl7n\" (UniqueName: \"kubernetes.io/projected/64187993-c038-4e9b-b7a1-a3a700f51383-kube-api-access-7zl7n\") pod \"glance6163-account-delete-ds6dr\" (UID: \"64187993-c038-4e9b-b7a1-a3a700f51383\") " pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.753733 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.753977 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/openstackclient" podUID="aacc45bd-80df-4155-b773-d7fae5f6aaa5" containerName="openstackclient" containerID="cri-o://f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f" gracePeriod=30 Oct 02 14:39:59 crc kubenswrapper[4717]: I1002 14:39:59.906015 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.182255 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.213194 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config\") pod \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.213272 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-scripts\") pod \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.213295 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7zj6\" (UniqueName: \"kubernetes.io/projected/aacc45bd-80df-4155-b773-d7fae5f6aaa5-kube-api-access-q7zj6\") pod \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.213367 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config-secret\") pod \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\" (UID: \"aacc45bd-80df-4155-b773-d7fae5f6aaa5\") " Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.214279 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-scripts" (OuterVolumeSpecName: "openstack-scripts") pod "aacc45bd-80df-4155-b773-d7fae5f6aaa5" (UID: "aacc45bd-80df-4155-b773-d7fae5f6aaa5"). InnerVolumeSpecName "openstack-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.218978 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aacc45bd-80df-4155-b773-d7fae5f6aaa5-kube-api-access-q7zj6" (OuterVolumeSpecName: "kube-api-access-q7zj6") pod "aacc45bd-80df-4155-b773-d7fae5f6aaa5" (UID: "aacc45bd-80df-4155-b773-d7fae5f6aaa5"). InnerVolumeSpecName "kube-api-access-q7zj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.235761 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "aacc45bd-80df-4155-b773-d7fae5f6aaa5" (UID: "aacc45bd-80df-4155-b773-d7fae5f6aaa5"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.235907 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "aacc45bd-80df-4155-b773-d7fae5f6aaa5" (UID: "aacc45bd-80df-4155-b773-d7fae5f6aaa5"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.315299 4717 reconciler_common.go:293] "Volume detached for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.315329 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7zj6\" (UniqueName: \"kubernetes.io/projected/aacc45bd-80df-4155-b773-d7fae5f6aaa5-kube-api-access-q7zj6\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.315341 4717 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.315351 4717 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/aacc45bd-80df-4155-b773-d7fae5f6aaa5-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.352002 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance6163-account-delete-ds6dr"] Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.478090 4717 generic.go:334] "Generic (PLEG): container finished" podID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerID="42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e" exitCode=143 Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.478158 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"889101bf-86a3-4d78-b1e9-328567d4cc7e","Type":"ContainerDied","Data":"42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e"} Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.479823 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" event={"ID":"64187993-c038-4e9b-b7a1-a3a700f51383","Type":"ContainerStarted","Data":"fc9ef959e991e0f1f79f920eda2479c41994f466bc67c75884ffab94fc00aa77"} Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.479874 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" event={"ID":"64187993-c038-4e9b-b7a1-a3a700f51383","Type":"ContainerStarted","Data":"d4af6c58f57384b0d223fa3a555d5ec337becd3bea920040d152811773695bfc"} Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.481674 4717 generic.go:334] "Generic (PLEG): container finished" podID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerID="f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e" exitCode=143 Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.481730 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c","Type":"ContainerDied","Data":"f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e"} Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.483269 4717 generic.go:334] "Generic (PLEG): container finished" podID="aacc45bd-80df-4155-b773-d7fae5f6aaa5" containerID="f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f" exitCode=143 Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.483303 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="glance-kuttl-tests/openstackclient" event={"ID":"aacc45bd-80df-4155-b773-d7fae5f6aaa5","Type":"ContainerDied","Data":"f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f"} Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.483324 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"aacc45bd-80df-4155-b773-d7fae5f6aaa5","Type":"ContainerDied","Data":"e1cb6fa95b7d18af7785133d9da0a25f059a6e3ddc24a7d0040aaf21f697e55a"} Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.483340 4717 scope.go:117] "RemoveContainer" containerID="f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.483447 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.502308 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" podStartSLOduration=1.5022802039999998 podStartE2EDuration="1.502280204s" podCreationTimestamp="2025-10-02 14:39:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:40:00.491685079 +0000 UTC m=+1151.343539525" watchObservedRunningTime="2025-10-02 14:40:00.502280204 +0000 UTC m=+1151.354134650" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.512898 4717 scope.go:117] "RemoveContainer" containerID="f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f" Oct 02 14:40:00 crc kubenswrapper[4717]: E1002 14:40:00.513564 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f\": container with ID starting with f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f not found: ID does not exist" containerID="f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.513632 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f"} err="failed to get container status \"f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f\": rpc error: code = NotFound desc = could not find container \"f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f\": container with ID starting with f1e1da91aea0faaa5dde48db4bfb56982f36d3e61a5445d413deb23d6382488f not found: ID does not exist" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.514686 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.519852 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.846983 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28f02d25-7483-4463-953d-c83788015501" path="/var/lib/kubelet/pods/28f02d25-7483-4463-953d-c83788015501/volumes" Oct 02 14:40:00 crc kubenswrapper[4717]: I1002 14:40:00.848170 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aacc45bd-80df-4155-b773-d7fae5f6aaa5" path="/var/lib/kubelet/pods/aacc45bd-80df-4155-b773-d7fae5f6aaa5/volumes" Oct 02 14:40:01 crc kubenswrapper[4717]: 
I1002 14:40:01.509694 4717 generic.go:334] "Generic (PLEG): container finished" podID="64187993-c038-4e9b-b7a1-a3a700f51383" containerID="fc9ef959e991e0f1f79f920eda2479c41994f466bc67c75884ffab94fc00aa77" exitCode=0 Oct 02 14:40:01 crc kubenswrapper[4717]: I1002 14:40:01.509842 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" event={"ID":"64187993-c038-4e9b-b7a1-a3a700f51383","Type":"ContainerDied","Data":"fc9ef959e991e0f1f79f920eda2479c41994f466bc67c75884ffab94fc00aa77"} Oct 02 14:40:02 crc kubenswrapper[4717]: I1002 14:40:02.836313 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" Oct 02 14:40:02 crc kubenswrapper[4717]: I1002 14:40:02.840765 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="glance-kuttl-tests/glance-default-single-0" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-log" probeResult="failure" output="Get \"http://10.217.0.106:9292/healthcheck\": read tcp 10.217.0.2:46864->10.217.0.106:9292: read: connection reset by peer" Oct 02 14:40:02 crc kubenswrapper[4717]: I1002 14:40:02.840763 4717 prober.go:107] "Probe failed" probeType="Readiness" pod="glance-kuttl-tests/glance-default-single-0" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-httpd" probeResult="failure" output="Get \"http://10.217.0.106:9292/healthcheck\": read tcp 10.217.0.2:46862->10.217.0.106:9292: read: connection reset by peer" Oct 02 14:40:02 crc kubenswrapper[4717]: I1002 14:40:02.864273 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zl7n\" (UniqueName: \"kubernetes.io/projected/64187993-c038-4e9b-b7a1-a3a700f51383-kube-api-access-7zl7n\") pod \"64187993-c038-4e9b-b7a1-a3a700f51383\" (UID: \"64187993-c038-4e9b-b7a1-a3a700f51383\") " Oct 02 14:40:02 crc kubenswrapper[4717]: I1002 14:40:02.902473 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64187993-c038-4e9b-b7a1-a3a700f51383-kube-api-access-7zl7n" (OuterVolumeSpecName: "kube-api-access-7zl7n") pod "64187993-c038-4e9b-b7a1-a3a700f51383" (UID: "64187993-c038-4e9b-b7a1-a3a700f51383"). InnerVolumeSpecName "kube-api-access-7zl7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:02 crc kubenswrapper[4717]: I1002 14:40:02.966632 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zl7n\" (UniqueName: \"kubernetes.io/projected/64187993-c038-4e9b-b7a1-a3a700f51383-kube-api-access-7zl7n\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.190681 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.246142 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.272768 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-logs\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.272810 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-lib-modules\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.272846 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-iscsi\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.272876 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-httpd-run\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.272895 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-httpd-run\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.272916 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-var-locks-brick\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.272950 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-run\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.273203 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.273784 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-logs" (OuterVolumeSpecName: "logs") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.273816 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274076 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-var-locks-brick\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274106 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274130 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-run\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274165 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z76h\" (UniqueName: \"kubernetes.io/projected/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-kube-api-access-7z76h\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274193 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-config-data\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274214 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-sys\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274238 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-nvme\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274264 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-lib-modules\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274299 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-dev\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: 
\"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274355 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-config-data\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274378 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-dev\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274426 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk8g7\" (UniqueName: \"kubernetes.io/projected/889101bf-86a3-4d78-b1e9-328567d4cc7e-kube-api-access-jk8g7\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274902 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-scripts\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275070 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-iscsi\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275101 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-logs\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275219 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275249 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-scripts\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275291 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-sys\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275314 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275330 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"889101bf-86a3-4d78-b1e9-328567d4cc7e\" (UID: \"889101bf-86a3-4d78-b1e9-328567d4cc7e\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275357 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-nvme\") pod \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\" (UID: \"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c\") " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275892 4717 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-logs\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275906 4717 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275918 4717 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274182 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-run" (OuterVolumeSpecName: "run") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274223 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274428 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274461 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274505 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274567 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-dev" (OuterVolumeSpecName: "dev") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274585 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.274634 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-sys" (OuterVolumeSpecName: "sys") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.275967 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.276053 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.276377 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-run" (OuterVolumeSpecName: "run") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.276565 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-sys" (OuterVolumeSpecName: "sys") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.276620 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "etc-iscsi". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.276875 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-logs" (OuterVolumeSpecName: "logs") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.276918 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-dev" (OuterVolumeSpecName: "dev") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.278245 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/889101bf-86a3-4d78-b1e9-328567d4cc7e-kube-api-access-jk8g7" (OuterVolumeSpecName: "kube-api-access-jk8g7") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "kube-api-access-jk8g7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.279717 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-scripts" (OuterVolumeSpecName: "scripts") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.280091 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance-cache") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.280920 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-kube-api-access-7z76h" (OuterVolumeSpecName: "kube-api-access-7z76h") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "kube-api-access-7z76h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.282134 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance-cache") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.282495 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.286841 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.301655 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-scripts" (OuterVolumeSpecName: "scripts") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.318562 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-config-data" (OuterVolumeSpecName: "config-data") pod "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" (UID: "b08bcfb1-2959-42c1-bf45-9b210d9a0e4c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.333613 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-config-data" (OuterVolumeSpecName: "config-data") pod "889101bf-86a3-4d78-b1e9-328567d4cc7e" (UID: "889101bf-86a3-4d78-b1e9-328567d4cc7e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377637 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z76h\" (UniqueName: \"kubernetes.io/projected/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-kube-api-access-7z76h\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377668 4717 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-sys\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377680 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377690 4717 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377699 4717 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377707 4717 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-dev\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377716 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-config-data\") on node \"crc\" DevicePath \"\"" Oct 
02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377726 4717 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-dev\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377738 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk8g7\" (UniqueName: \"kubernetes.io/projected/889101bf-86a3-4d78-b1e9-328567d4cc7e-kube-api-access-jk8g7\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377748 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/889101bf-86a3-4d78-b1e9-328567d4cc7e-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377756 4717 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377765 4717 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-logs\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377799 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377809 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377817 4717 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-sys\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377831 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377844 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377854 4717 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377865 4717 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377873 4717 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/889101bf-86a3-4d78-b1e9-328567d4cc7e-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377881 4717 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-var-locks-brick\") on node 
\"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377897 4717 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377907 4717 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377924 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.377949 4717 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/889101bf-86a3-4d78-b1e9-328567d4cc7e-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.393309 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.393501 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.395837 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.399560 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.480282 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.480312 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.480325 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.480336 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.554208 4717 generic.go:334] "Generic (PLEG): container finished" podID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerID="336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527" exitCode=0 Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.554264 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.554287 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"889101bf-86a3-4d78-b1e9-328567d4cc7e","Type":"ContainerDied","Data":"336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527"} Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.554318 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"889101bf-86a3-4d78-b1e9-328567d4cc7e","Type":"ContainerDied","Data":"9718ecddab549f64f6d0becb586a3a2a6cc602d8d4314468b8bddd034d92291e"} Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.554338 4717 scope.go:117] "RemoveContainer" containerID="336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.556061 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.556060 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance6163-account-delete-ds6dr" event={"ID":"64187993-c038-4e9b-b7a1-a3a700f51383","Type":"ContainerDied","Data":"d4af6c58f57384b0d223fa3a555d5ec337becd3bea920040d152811773695bfc"} Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.556272 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4af6c58f57384b0d223fa3a555d5ec337becd3bea920040d152811773695bfc" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.558664 4717 generic.go:334] "Generic (PLEG): container finished" podID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerID="878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563" exitCode=0 Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.558699 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c","Type":"ContainerDied","Data":"878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563"} Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.558727 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"b08bcfb1-2959-42c1-bf45-9b210d9a0e4c","Type":"ContainerDied","Data":"3751720a89ba1c064a18da729ba5545d236e5d2b3f3bf2ff5c8ab0aae04b421c"} Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.558774 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.582305 4717 scope.go:117] "RemoveContainer" containerID="42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.592124 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.603531 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.610571 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.612211 4717 scope.go:117] "RemoveContainer" containerID="336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:03.612724 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527\": container with ID starting with 336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527 not found: ID does not exist" containerID="336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.612784 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527"} err="failed to get container status \"336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527\": rpc error: code = NotFound desc = could not find container \"336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527\": container with ID starting with 336f9c79c0d9a3fcfca2e2b24824f7e83e3ce67571a0ca2f1ba0c07b1ce63527 not found: ID does not exist" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.612827 4717 scope.go:117] "RemoveContainer" containerID="42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:03.613247 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e\": container with ID starting with 42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e not found: ID does not exist" containerID="42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.613282 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e"} err="failed to get container status \"42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e\": rpc error: code = NotFound desc = could not find container \"42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e\": container with ID starting with 42a76f98c9cc718be90095dbefb8541e4229f77ae9dcd0c55b5394c9f322747e not found: ID does not exist" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.613305 4717 scope.go:117] "RemoveContainer" containerID="878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.616618 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["glance-kuttl-tests/glance-default-single-1"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.635221 4717 scope.go:117] "RemoveContainer" containerID="f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.657781 4717 scope.go:117] "RemoveContainer" containerID="878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:03.658616 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563\": container with ID starting with 878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563 not found: ID does not exist" containerID="878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.658665 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563"} err="failed to get container status \"878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563\": rpc error: code = NotFound desc = could not find container \"878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563\": container with ID starting with 878f09f2c2bd62da41a7e9a34623db206971d3250dc8e974c78d92dfd6014563 not found: ID does not exist" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.658696 4717 scope.go:117] "RemoveContainer" containerID="f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:03.659187 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e\": container with ID starting with f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e not found: ID does not exist" containerID="f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:03.659220 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e"} err="failed to get container status \"f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e\": rpc error: code = NotFound desc = could not find container \"f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e\": container with ID starting with f1c1017bf934af586d832f8c4a033658213a5b4633552125190386525d33854e not found: ID does not exist" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.596310 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance6163-account-delete-ds6dr"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.604144 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-qn6cv"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.610407 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-6163-account-create-fww5s"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.615837 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-qn6cv"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.621214 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["glance-kuttl-tests/glance-6163-account-create-fww5s"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.625155 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance6163-account-delete-ds6dr"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.723621 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-ng487"] Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:04.723985 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerName="glance-log" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724005 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerName="glance-log" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:04.724018 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-httpd" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724026 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-httpd" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:04.724037 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64187993-c038-4e9b-b7a1-a3a700f51383" containerName="mariadb-account-delete" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724044 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="64187993-c038-4e9b-b7a1-a3a700f51383" containerName="mariadb-account-delete" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:04.724062 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aacc45bd-80df-4155-b773-d7fae5f6aaa5" containerName="openstackclient" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724070 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="aacc45bd-80df-4155-b773-d7fae5f6aaa5" containerName="openstackclient" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:04.724080 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-log" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724087 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-log" Oct 02 14:40:04 crc kubenswrapper[4717]: E1002 14:40:04.724101 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerName="glance-httpd" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724108 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerName="glance-httpd" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724252 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="64187993-c038-4e9b-b7a1-a3a700f51383" containerName="mariadb-account-delete" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724265 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerName="glance-log" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724275 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" containerName="glance-httpd" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724289 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="aacc45bd-80df-4155-b773-d7fae5f6aaa5" 
containerName="openstackclient" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724299 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-log" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724309 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" containerName="glance-httpd" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.724993 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-ng487" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.731073 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-ng487"] Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.801079 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwk89\" (UniqueName: \"kubernetes.io/projected/04183c90-0bd9-4aaf-ad60-abd4897a73f5-kube-api-access-bwk89\") pod \"glance-db-create-ng487\" (UID: \"04183c90-0bd9-4aaf-ad60-abd4897a73f5\") " pod="glance-kuttl-tests/glance-db-create-ng487" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.848600 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64187993-c038-4e9b-b7a1-a3a700f51383" path="/var/lib/kubelet/pods/64187993-c038-4e9b-b7a1-a3a700f51383/volumes" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.849203 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ea11fb5-41b6-47b6-9143-979e994ab8ab" path="/var/lib/kubelet/pods/7ea11fb5-41b6-47b6-9143-979e994ab8ab/volumes" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.850079 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="889101bf-86a3-4d78-b1e9-328567d4cc7e" path="/var/lib/kubelet/pods/889101bf-86a3-4d78-b1e9-328567d4cc7e/volumes" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.851400 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b08bcfb1-2959-42c1-bf45-9b210d9a0e4c" path="/var/lib/kubelet/pods/b08bcfb1-2959-42c1-bf45-9b210d9a0e4c/volumes" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.852050 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd3c7e7f-b1a7-40fe-847e-7c01c213273e" path="/var/lib/kubelet/pods/dd3c7e7f-b1a7-40fe-847e-7c01c213273e/volumes" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.902776 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwk89\" (UniqueName: \"kubernetes.io/projected/04183c90-0bd9-4aaf-ad60-abd4897a73f5-kube-api-access-bwk89\") pod \"glance-db-create-ng487\" (UID: \"04183c90-0bd9-4aaf-ad60-abd4897a73f5\") " pod="glance-kuttl-tests/glance-db-create-ng487" Oct 02 14:40:04 crc kubenswrapper[4717]: I1002 14:40:04.923661 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwk89\" (UniqueName: \"kubernetes.io/projected/04183c90-0bd9-4aaf-ad60-abd4897a73f5-kube-api-access-bwk89\") pod \"glance-db-create-ng487\" (UID: \"04183c90-0bd9-4aaf-ad60-abd4897a73f5\") " pod="glance-kuttl-tests/glance-db-create-ng487" Oct 02 14:40:05 crc kubenswrapper[4717]: I1002 14:40:05.040543 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-ng487" Oct 02 14:40:05 crc kubenswrapper[4717]: I1002 14:40:05.262084 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-ng487"] Oct 02 14:40:05 crc kubenswrapper[4717]: W1002 14:40:05.264760 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04183c90_0bd9_4aaf_ad60_abd4897a73f5.slice/crio-cd7c9a2e3a9e21bcbdc4cf98996529b6c206ba9b12e73dca789b859d7825d726 WatchSource:0}: Error finding container cd7c9a2e3a9e21bcbdc4cf98996529b6c206ba9b12e73dca789b859d7825d726: Status 404 returned error can't find the container with id cd7c9a2e3a9e21bcbdc4cf98996529b6c206ba9b12e73dca789b859d7825d726 Oct 02 14:40:05 crc kubenswrapper[4717]: I1002 14:40:05.581312 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-ng487" event={"ID":"04183c90-0bd9-4aaf-ad60-abd4897a73f5","Type":"ContainerStarted","Data":"cb990ee7d49d70135ea27662a6e39aba760d5f6760d12ad8b001aa5149b16035"} Oct 02 14:40:05 crc kubenswrapper[4717]: I1002 14:40:05.581354 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-ng487" event={"ID":"04183c90-0bd9-4aaf-ad60-abd4897a73f5","Type":"ContainerStarted","Data":"cd7c9a2e3a9e21bcbdc4cf98996529b6c206ba9b12e73dca789b859d7825d726"} Oct 02 14:40:05 crc kubenswrapper[4717]: I1002 14:40:05.598223 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-create-ng487" podStartSLOduration=1.598204467 podStartE2EDuration="1.598204467s" podCreationTimestamp="2025-10-02 14:40:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:40:05.596482431 +0000 UTC m=+1156.448336897" watchObservedRunningTime="2025-10-02 14:40:05.598204467 +0000 UTC m=+1156.450058913" Oct 02 14:40:06 crc kubenswrapper[4717]: I1002 14:40:06.590824 4717 generic.go:334] "Generic (PLEG): container finished" podID="04183c90-0bd9-4aaf-ad60-abd4897a73f5" containerID="cb990ee7d49d70135ea27662a6e39aba760d5f6760d12ad8b001aa5149b16035" exitCode=0 Oct 02 14:40:06 crc kubenswrapper[4717]: I1002 14:40:06.590888 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-ng487" event={"ID":"04183c90-0bd9-4aaf-ad60-abd4897a73f5","Type":"ContainerDied","Data":"cb990ee7d49d70135ea27662a6e39aba760d5f6760d12ad8b001aa5149b16035"} Oct 02 14:40:07 crc kubenswrapper[4717]: I1002 14:40:07.903693 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-ng487" Oct 02 14:40:07 crc kubenswrapper[4717]: I1002 14:40:07.945238 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwk89\" (UniqueName: \"kubernetes.io/projected/04183c90-0bd9-4aaf-ad60-abd4897a73f5-kube-api-access-bwk89\") pod \"04183c90-0bd9-4aaf-ad60-abd4897a73f5\" (UID: \"04183c90-0bd9-4aaf-ad60-abd4897a73f5\") " Oct 02 14:40:07 crc kubenswrapper[4717]: I1002 14:40:07.951125 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04183c90-0bd9-4aaf-ad60-abd4897a73f5-kube-api-access-bwk89" (OuterVolumeSpecName: "kube-api-access-bwk89") pod "04183c90-0bd9-4aaf-ad60-abd4897a73f5" (UID: "04183c90-0bd9-4aaf-ad60-abd4897a73f5"). InnerVolumeSpecName "kube-api-access-bwk89". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:08 crc kubenswrapper[4717]: I1002 14:40:08.047159 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwk89\" (UniqueName: \"kubernetes.io/projected/04183c90-0bd9-4aaf-ad60-abd4897a73f5-kube-api-access-bwk89\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:08 crc kubenswrapper[4717]: I1002 14:40:08.608525 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-ng487" event={"ID":"04183c90-0bd9-4aaf-ad60-abd4897a73f5","Type":"ContainerDied","Data":"cd7c9a2e3a9e21bcbdc4cf98996529b6c206ba9b12e73dca789b859d7825d726"} Oct 02 14:40:08 crc kubenswrapper[4717]: I1002 14:40:08.608560 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd7c9a2e3a9e21bcbdc4cf98996529b6c206ba9b12e73dca789b859d7825d726" Oct 02 14:40:08 crc kubenswrapper[4717]: I1002 14:40:08.608598 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-ng487" Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.762270 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-d39f-account-create-rr5vg"] Oct 02 14:40:14 crc kubenswrapper[4717]: E1002 14:40:14.763070 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04183c90-0bd9-4aaf-ad60-abd4897a73f5" containerName="mariadb-database-create" Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.763084 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="04183c90-0bd9-4aaf-ad60-abd4897a73f5" containerName="mariadb-database-create" Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.763245 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="04183c90-0bd9-4aaf-ad60-abd4897a73f5" containerName="mariadb-database-create" Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.764510 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.766734 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.771919 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-d39f-account-create-rr5vg"] Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.842784 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9tjq\" (UniqueName: \"kubernetes.io/projected/f9854dc2-3cbd-4ad9-a949-a3cba007ad07-kube-api-access-j9tjq\") pod \"glance-d39f-account-create-rr5vg\" (UID: \"f9854dc2-3cbd-4ad9-a949-a3cba007ad07\") " pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.944688 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9tjq\" (UniqueName: \"kubernetes.io/projected/f9854dc2-3cbd-4ad9-a949-a3cba007ad07-kube-api-access-j9tjq\") pod \"glance-d39f-account-create-rr5vg\" (UID: \"f9854dc2-3cbd-4ad9-a949-a3cba007ad07\") " pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" Oct 02 14:40:14 crc kubenswrapper[4717]: I1002 14:40:14.964877 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9tjq\" (UniqueName: \"kubernetes.io/projected/f9854dc2-3cbd-4ad9-a949-a3cba007ad07-kube-api-access-j9tjq\") pod \"glance-d39f-account-create-rr5vg\" (UID: \"f9854dc2-3cbd-4ad9-a949-a3cba007ad07\") " pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" Oct 02 14:40:15 crc kubenswrapper[4717]: I1002 14:40:15.096174 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" Oct 02 14:40:15 crc kubenswrapper[4717]: I1002 14:40:15.507114 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-d39f-account-create-rr5vg"] Oct 02 14:40:15 crc kubenswrapper[4717]: W1002 14:40:15.518138 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9854dc2_3cbd_4ad9_a949_a3cba007ad07.slice/crio-6335529771fdc5d20c91a7c1df0a048cb82bcf6974db0e01ca2b392f3910b8b3 WatchSource:0}: Error finding container 6335529771fdc5d20c91a7c1df0a048cb82bcf6974db0e01ca2b392f3910b8b3: Status 404 returned error can't find the container with id 6335529771fdc5d20c91a7c1df0a048cb82bcf6974db0e01ca2b392f3910b8b3 Oct 02 14:40:15 crc kubenswrapper[4717]: I1002 14:40:15.661117 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" event={"ID":"f9854dc2-3cbd-4ad9-a949-a3cba007ad07","Type":"ContainerStarted","Data":"6335529771fdc5d20c91a7c1df0a048cb82bcf6974db0e01ca2b392f3910b8b3"} Oct 02 14:40:16 crc kubenswrapper[4717]: I1002 14:40:16.669370 4717 generic.go:334] "Generic (PLEG): container finished" podID="f9854dc2-3cbd-4ad9-a949-a3cba007ad07" containerID="695395e57f33fd1a5ea70ffa8f8dbdae8444c5117aa81724194e1fcc4fd0cdf0" exitCode=0 Oct 02 14:40:16 crc kubenswrapper[4717]: I1002 14:40:16.669445 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" event={"ID":"f9854dc2-3cbd-4ad9-a949-a3cba007ad07","Type":"ContainerDied","Data":"695395e57f33fd1a5ea70ffa8f8dbdae8444c5117aa81724194e1fcc4fd0cdf0"} Oct 02 14:40:18 crc kubenswrapper[4717]: I1002 14:40:18.006179 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" Oct 02 14:40:18 crc kubenswrapper[4717]: I1002 14:40:18.089394 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9tjq\" (UniqueName: \"kubernetes.io/projected/f9854dc2-3cbd-4ad9-a949-a3cba007ad07-kube-api-access-j9tjq\") pod \"f9854dc2-3cbd-4ad9-a949-a3cba007ad07\" (UID: \"f9854dc2-3cbd-4ad9-a949-a3cba007ad07\") " Oct 02 14:40:18 crc kubenswrapper[4717]: I1002 14:40:18.109128 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9854dc2-3cbd-4ad9-a949-a3cba007ad07-kube-api-access-j9tjq" (OuterVolumeSpecName: "kube-api-access-j9tjq") pod "f9854dc2-3cbd-4ad9-a949-a3cba007ad07" (UID: "f9854dc2-3cbd-4ad9-a949-a3cba007ad07"). InnerVolumeSpecName "kube-api-access-j9tjq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:18 crc kubenswrapper[4717]: I1002 14:40:18.191109 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9tjq\" (UniqueName: \"kubernetes.io/projected/f9854dc2-3cbd-4ad9-a949-a3cba007ad07-kube-api-access-j9tjq\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:18 crc kubenswrapper[4717]: I1002 14:40:18.682154 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" event={"ID":"f9854dc2-3cbd-4ad9-a949-a3cba007ad07","Type":"ContainerDied","Data":"6335529771fdc5d20c91a7c1df0a048cb82bcf6974db0e01ca2b392f3910b8b3"} Oct 02 14:40:18 crc kubenswrapper[4717]: I1002 14:40:18.682193 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6335529771fdc5d20c91a7c1df0a048cb82bcf6974db0e01ca2b392f3910b8b3" Oct 02 14:40:18 crc kubenswrapper[4717]: I1002 14:40:18.682260 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-d39f-account-create-rr5vg" Oct 02 14:40:19 crc kubenswrapper[4717]: I1002 14:40:19.889789 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-cc8g9"] Oct 02 14:40:19 crc kubenswrapper[4717]: E1002 14:40:19.890416 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9854dc2-3cbd-4ad9-a949-a3cba007ad07" containerName="mariadb-account-create" Oct 02 14:40:19 crc kubenswrapper[4717]: I1002 14:40:19.890429 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9854dc2-3cbd-4ad9-a949-a3cba007ad07" containerName="mariadb-account-create" Oct 02 14:40:19 crc kubenswrapper[4717]: I1002 14:40:19.890542 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9854dc2-3cbd-4ad9-a949-a3cba007ad07" containerName="mariadb-account-create" Oct 02 14:40:19 crc kubenswrapper[4717]: I1002 14:40:19.890968 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:19 crc kubenswrapper[4717]: I1002 14:40:19.892592 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-t4j8h" Oct 02 14:40:19 crc kubenswrapper[4717]: I1002 14:40:19.892913 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Oct 02 14:40:19 crc kubenswrapper[4717]: I1002 14:40:19.893058 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Oct 02 14:40:19 crc kubenswrapper[4717]: I1002 14:40:19.902037 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-cc8g9"] Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.016261 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-combined-ca-bundle\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.016407 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-config-data\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.016521 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-db-sync-config-data\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.016590 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs9qx\" (UniqueName: \"kubernetes.io/projected/19eb4bd7-5454-4660-bc62-8e310984dc5b-kube-api-access-rs9qx\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.118521 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-db-sync-config-data\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.118586 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs9qx\" (UniqueName: \"kubernetes.io/projected/19eb4bd7-5454-4660-bc62-8e310984dc5b-kube-api-access-rs9qx\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.118635 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-combined-ca-bundle\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " 
pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.118669 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-config-data\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.122923 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-config-data\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.123132 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-db-sync-config-data\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.132431 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-combined-ca-bundle\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.148302 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs9qx\" (UniqueName: \"kubernetes.io/projected/19eb4bd7-5454-4660-bc62-8e310984dc5b-kube-api-access-rs9qx\") pod \"glance-db-sync-cc8g9\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.208576 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.430677 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-cc8g9"] Oct 02 14:40:20 crc kubenswrapper[4717]: I1002 14:40:20.695978 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-cc8g9" event={"ID":"19eb4bd7-5454-4660-bc62-8e310984dc5b","Type":"ContainerStarted","Data":"584ecb40057d7eaa9f9869ea75343527b771350c38a268279bd8a352817be07c"} Oct 02 14:40:21 crc kubenswrapper[4717]: I1002 14:40:21.706898 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-cc8g9" event={"ID":"19eb4bd7-5454-4660-bc62-8e310984dc5b","Type":"ContainerStarted","Data":"a31b776ed84372160a54d8f760db685debb304ac895385abd7f080c2a9ab3f7a"} Oct 02 14:40:21 crc kubenswrapper[4717]: I1002 14:40:21.724123 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-cc8g9" podStartSLOduration=2.724104778 podStartE2EDuration="2.724104778s" podCreationTimestamp="2025-10-02 14:40:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:40:21.722458603 +0000 UTC m=+1172.574313049" watchObservedRunningTime="2025-10-02 14:40:21.724104778 +0000 UTC m=+1172.575959224" Oct 02 14:40:24 crc kubenswrapper[4717]: I1002 14:40:24.734194 4717 generic.go:334] "Generic (PLEG): container finished" podID="19eb4bd7-5454-4660-bc62-8e310984dc5b" containerID="a31b776ed84372160a54d8f760db685debb304ac895385abd7f080c2a9ab3f7a" exitCode=0 Oct 02 14:40:24 crc kubenswrapper[4717]: I1002 14:40:24.734429 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-cc8g9" event={"ID":"19eb4bd7-5454-4660-bc62-8e310984dc5b","Type":"ContainerDied","Data":"a31b776ed84372160a54d8f760db685debb304ac895385abd7f080c2a9ab3f7a"} Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.067419 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.106485 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-db-sync-config-data\") pod \"19eb4bd7-5454-4660-bc62-8e310984dc5b\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.106531 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs9qx\" (UniqueName: \"kubernetes.io/projected/19eb4bd7-5454-4660-bc62-8e310984dc5b-kube-api-access-rs9qx\") pod \"19eb4bd7-5454-4660-bc62-8e310984dc5b\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.106643 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-config-data\") pod \"19eb4bd7-5454-4660-bc62-8e310984dc5b\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.106691 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-combined-ca-bundle\") pod \"19eb4bd7-5454-4660-bc62-8e310984dc5b\" (UID: \"19eb4bd7-5454-4660-bc62-8e310984dc5b\") " Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.122236 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "19eb4bd7-5454-4660-bc62-8e310984dc5b" (UID: "19eb4bd7-5454-4660-bc62-8e310984dc5b"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.122287 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19eb4bd7-5454-4660-bc62-8e310984dc5b-kube-api-access-rs9qx" (OuterVolumeSpecName: "kube-api-access-rs9qx") pod "19eb4bd7-5454-4660-bc62-8e310984dc5b" (UID: "19eb4bd7-5454-4660-bc62-8e310984dc5b"). InnerVolumeSpecName "kube-api-access-rs9qx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.135351 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19eb4bd7-5454-4660-bc62-8e310984dc5b" (UID: "19eb4bd7-5454-4660-bc62-8e310984dc5b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.156953 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-config-data" (OuterVolumeSpecName: "config-data") pod "19eb4bd7-5454-4660-bc62-8e310984dc5b" (UID: "19eb4bd7-5454-4660-bc62-8e310984dc5b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.208601 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.208642 4717 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.208656 4717 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19eb4bd7-5454-4660-bc62-8e310984dc5b-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.208665 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs9qx\" (UniqueName: \"kubernetes.io/projected/19eb4bd7-5454-4660-bc62-8e310984dc5b-kube-api-access-rs9qx\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.759066 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-cc8g9" event={"ID":"19eb4bd7-5454-4660-bc62-8e310984dc5b","Type":"ContainerDied","Data":"584ecb40057d7eaa9f9869ea75343527b771350c38a268279bd8a352817be07c"} Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.759106 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="584ecb40057d7eaa9f9869ea75343527b771350c38a268279bd8a352817be07c" Oct 02 14:40:26 crc kubenswrapper[4717]: I1002 14:40:26.759134 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-cc8g9" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.120658 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:27 crc kubenswrapper[4717]: E1002 14:40:27.121387 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19eb4bd7-5454-4660-bc62-8e310984dc5b" containerName="glance-db-sync" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.121404 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="19eb4bd7-5454-4660-bc62-8e310984dc5b" containerName="glance-db-sync" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.121554 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="19eb4bd7-5454-4660-bc62-8e310984dc5b" containerName="glance-db-sync" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.122445 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.127060 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.127093 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-public-svc" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.127316 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-t4j8h" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.127354 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.127722 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.127957 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-internal-svc" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.136920 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223652 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-httpd-run\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223701 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-config-data\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223734 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-logs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223753 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nn67\" (UniqueName: \"kubernetes.io/projected/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-kube-api-access-2nn67\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223785 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223801 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-scripts\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223820 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223841 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.223854 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.325798 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-httpd-run\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.325876 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-config-data\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.325955 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-logs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.325988 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nn67\" (UniqueName: \"kubernetes.io/projected/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-kube-api-access-2nn67\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.326056 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.326095 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-scripts\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.326127 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.326154 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.326174 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.328177 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-logs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.328176 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") device mount path \"/mnt/openstack/pv06\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.328296 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-httpd-run\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.330765 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-scripts\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.333325 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.335325 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-config-data\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " 
pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.343294 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nn67\" (UniqueName: \"kubernetes.io/projected/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-kube-api-access-2nn67\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.343543 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.345637 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.348331 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.441003 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:27 crc kubenswrapper[4717]: I1002 14:40:27.902609 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:28 crc kubenswrapper[4717]: I1002 14:40:28.591619 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:28 crc kubenswrapper[4717]: I1002 14:40:28.776530 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f","Type":"ContainerStarted","Data":"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6"} Oct 02 14:40:28 crc kubenswrapper[4717]: I1002 14:40:28.776598 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f","Type":"ContainerStarted","Data":"4f1c3cc6d03790fa2e94b3c655639f42b6c30a6d8b469478e07b9db9d5c301b0"} Oct 02 14:40:29 crc kubenswrapper[4717]: I1002 14:40:29.786028 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f","Type":"ContainerStarted","Data":"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511"} Oct 02 14:40:29 crc kubenswrapper[4717]: I1002 14:40:29.786165 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerName="glance-log" containerID="cri-o://1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6" gracePeriod=30 Oct 02 14:40:29 crc kubenswrapper[4717]: I1002 14:40:29.786228 4717 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="glance-kuttl-tests/glance-default-single-0" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerName="glance-httpd" containerID="cri-o://a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511" gracePeriod=30 Oct 02 14:40:29 crc kubenswrapper[4717]: I1002 14:40:29.813318 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=2.813303811 podStartE2EDuration="2.813303811s" podCreationTimestamp="2025-10-02 14:40:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:40:29.812043687 +0000 UTC m=+1180.663898133" watchObservedRunningTime="2025-10-02 14:40:29.813303811 +0000 UTC m=+1180.665158247" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.299678 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.368885 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-scripts\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.368948 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-combined-ca-bundle\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.368976 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-config-data\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369015 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-internal-tls-certs\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369075 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nn67\" (UniqueName: \"kubernetes.io/projected/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-kube-api-access-2nn67\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369115 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369142 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-public-tls-certs\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369181 4717 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-logs\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369216 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-httpd-run\") pod \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\" (UID: \"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f\") " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369502 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-logs" (OuterVolumeSpecName: "logs") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369657 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369908 4717 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-logs\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.369922 4717 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.374612 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-scripts" (OuterVolumeSpecName: "scripts") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.375359 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.387138 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-kube-api-access-2nn67" (OuterVolumeSpecName: "kube-api-access-2nn67") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "kube-api-access-2nn67". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.407086 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.407405 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.408911 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.415673 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-config-data" (OuterVolumeSpecName: "config-data") pod "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" (UID: "ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.471635 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nn67\" (UniqueName: \"kubernetes.io/projected/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-kube-api-access-2nn67\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.471731 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.471753 4717 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.471773 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.471789 4717 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.471803 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.471818 4717 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.484077 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.573335 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.798142 4717 generic.go:334] "Generic (PLEG): container finished" podID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerID="a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511" exitCode=0 Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.798184 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.798207 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f","Type":"ContainerDied","Data":"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511"} Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.798240 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f","Type":"ContainerDied","Data":"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6"} Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.798261 4717 scope.go:117] "RemoveContainer" containerID="a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.798191 4717 generic.go:334] "Generic (PLEG): container finished" podID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerID="1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6" exitCode=143 Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.798346 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f","Type":"ContainerDied","Data":"4f1c3cc6d03790fa2e94b3c655639f42b6c30a6d8b469478e07b9db9d5c301b0"} Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.824380 4717 scope.go:117] "RemoveContainer" containerID="1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.873678 4717 scope.go:117] "RemoveContainer" containerID="a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511" Oct 02 14:40:30 crc kubenswrapper[4717]: E1002 14:40:30.874614 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511\": container with ID starting with a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511 not found: ID does not exist" containerID="a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.874692 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511"} err="failed to get container status 
\"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511\": rpc error: code = NotFound desc = could not find container \"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511\": container with ID starting with a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511 not found: ID does not exist" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.874746 4717 scope.go:117] "RemoveContainer" containerID="1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6" Oct 02 14:40:30 crc kubenswrapper[4717]: E1002 14:40:30.877165 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6\": container with ID starting with 1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6 not found: ID does not exist" containerID="1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.877228 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6"} err="failed to get container status \"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6\": rpc error: code = NotFound desc = could not find container \"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6\": container with ID starting with 1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6 not found: ID does not exist" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.877274 4717 scope.go:117] "RemoveContainer" containerID="a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.878214 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511"} err="failed to get container status \"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511\": rpc error: code = NotFound desc = could not find container \"a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511\": container with ID starting with a0645fd9ac447eacd97ab93b743bcb13c92621bcde01256daaed1140439fa511 not found: ID does not exist" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.878254 4717 scope.go:117] "RemoveContainer" containerID="1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.878453 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6"} err="failed to get container status \"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6\": rpc error: code = NotFound desc = could not find container \"1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6\": container with ID starting with 1da1c69438032154c0869cc7e091e353d6ace32114aebafb2a6aeb15330158c6 not found: ID does not exist" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.892297 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.892354 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.892380 4717 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:30 crc kubenswrapper[4717]: E1002 14:40:30.892676 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerName="glance-log" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.892703 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerName="glance-log" Oct 02 14:40:30 crc kubenswrapper[4717]: E1002 14:40:30.892741 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerName="glance-httpd" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.892753 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerName="glance-httpd" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.892974 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerName="glance-log" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.893006 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" containerName="glance-httpd" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.894145 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.894264 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.907875 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.908083 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-t4j8h" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.908103 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-internal-svc" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.908300 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.908356 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-public-svc" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.908481 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.980546 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.980585 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwlrc\" (UniqueName: \"kubernetes.io/projected/794f8406-69b1-4755-8f78-9ce3572ea5e3-kube-api-access-nwlrc\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 
14:40:30.980627 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.980645 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.980822 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.980855 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-logs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.980880 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.980906 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-httpd-run\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:30 crc kubenswrapper[4717]: I1002 14:40:30.980952 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.082759 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.082832 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 
14:40:31.082974 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.083011 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-logs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.083047 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.083089 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-httpd-run\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.083136 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.083252 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.083302 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwlrc\" (UniqueName: \"kubernetes.io/projected/794f8406-69b1-4755-8f78-9ce3572ea5e3-kube-api-access-nwlrc\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.083402 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") device mount path \"/mnt/openstack/pv06\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.083685 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-logs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.084232 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-httpd-run\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.088398 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.088603 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.088642 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.091429 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.091603 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.103023 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.103571 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwlrc\" (UniqueName: \"kubernetes.io/projected/794f8406-69b1-4755-8f78-9ce3572ea5e3-kube-api-access-nwlrc\") pod \"glance-default-single-0\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.247314 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.683198 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:31 crc kubenswrapper[4717]: I1002 14:40:31.820588 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"794f8406-69b1-4755-8f78-9ce3572ea5e3","Type":"ContainerStarted","Data":"ac8ad3c87ba0c3854e5b34dfca39043375eee3bf4454f88325d0064cdb14fb26"} Oct 02 14:40:32 crc kubenswrapper[4717]: I1002 14:40:32.859268 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f" path="/var/lib/kubelet/pods/ce6e4d4d-18d2-4cdb-9e74-a4effb805e7f/volumes" Oct 02 14:40:32 crc kubenswrapper[4717]: I1002 14:40:32.860988 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"794f8406-69b1-4755-8f78-9ce3572ea5e3","Type":"ContainerStarted","Data":"50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85"} Oct 02 14:40:33 crc kubenswrapper[4717]: I1002 14:40:33.869702 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"794f8406-69b1-4755-8f78-9ce3572ea5e3","Type":"ContainerStarted","Data":"ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4"} Oct 02 14:40:33 crc kubenswrapper[4717]: I1002 14:40:33.917824 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=3.917798721 podStartE2EDuration="3.917798721s" podCreationTimestamp="2025-10-02 14:40:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:40:33.901557465 +0000 UTC m=+1184.753411921" watchObservedRunningTime="2025-10-02 14:40:33.917798721 +0000 UTC m=+1184.769653167" Oct 02 14:40:41 crc kubenswrapper[4717]: I1002 14:40:41.247858 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:41 crc kubenswrapper[4717]: I1002 14:40:41.248549 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:41 crc kubenswrapper[4717]: I1002 14:40:41.280461 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:41 crc kubenswrapper[4717]: I1002 14:40:41.285415 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:41 crc kubenswrapper[4717]: I1002 14:40:41.949383 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:41 crc kubenswrapper[4717]: I1002 14:40:41.949656 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:43 crc kubenswrapper[4717]: I1002 14:40:43.884532 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:43 crc kubenswrapper[4717]: I1002 14:40:43.887273 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 
14:40:44.749848 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-cc8g9"] Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 14:40:44.757029 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-sync-cc8g9"] Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 14:40:44.820682 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glanced39f-account-delete-gg7xs"] Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 14:40:44.821520 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 14:40:44.849620 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19eb4bd7-5454-4660-bc62-8e310984dc5b" path="/var/lib/kubelet/pods/19eb4bd7-5454-4660-bc62-8e310984dc5b/volumes" Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 14:40:44.851762 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 14:40:44.872010 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glanced39f-account-delete-gg7xs"] Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 14:40:44.912529 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q96xk\" (UniqueName: \"kubernetes.io/projected/0e7523c5-ac50-40c6-abc5-26ffddf86a25-kube-api-access-q96xk\") pod \"glanced39f-account-delete-gg7xs\" (UID: \"0e7523c5-ac50-40c6-abc5-26ffddf86a25\") " pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" Oct 02 14:40:44 crc kubenswrapper[4717]: I1002 14:40:44.973352 4717 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="glance-kuttl-tests/glance-default-single-0" secret="" err="secret \"glance-glance-dockercfg-t4j8h\" not found" Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.013884 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q96xk\" (UniqueName: \"kubernetes.io/projected/0e7523c5-ac50-40c6-abc5-26ffddf86a25-kube-api-access-q96xk\") pod \"glanced39f-account-delete-gg7xs\" (UID: \"0e7523c5-ac50-40c6-abc5-26ffddf86a25\") " pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.035444 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q96xk\" (UniqueName: \"kubernetes.io/projected/0e7523c5-ac50-40c6-abc5-26ffddf86a25-kube-api-access-q96xk\") pod \"glanced39f-account-delete-gg7xs\" (UID: \"0e7523c5-ac50-40c6-abc5-26ffddf86a25\") " pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" Oct 02 14:40:45 crc kubenswrapper[4717]: E1002 14:40:45.115924 4717 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-scripts: secret "glance-scripts" not found Oct 02 14:40:45 crc kubenswrapper[4717]: E1002 14:40:45.116013 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts podName:794f8406-69b1-4755-8f78-9ce3572ea5e3 nodeName:}" failed. No retries permitted until 2025-10-02 14:40:45.615995233 +0000 UTC m=+1196.467849679 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts") pod "glance-default-single-0" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3") : secret "glance-scripts" not found Oct 02 14:40:45 crc kubenswrapper[4717]: E1002 14:40:45.116181 4717 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-default-single-config-data: secret "glance-default-single-config-data" not found Oct 02 14:40:45 crc kubenswrapper[4717]: E1002 14:40:45.116222 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data podName:794f8406-69b1-4755-8f78-9ce3572ea5e3 nodeName:}" failed. No retries permitted until 2025-10-02 14:40:45.616208719 +0000 UTC m=+1196.468063155 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data") pod "glance-default-single-0" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3") : secret "glance-default-single-config-data" not found Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.142627 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.401795 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glanced39f-account-delete-gg7xs"] Oct 02 14:40:45 crc kubenswrapper[4717]: E1002 14:40:45.623312 4717 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-default-single-config-data: secret "glance-default-single-config-data" not found Oct 02 14:40:45 crc kubenswrapper[4717]: E1002 14:40:45.623700 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data podName:794f8406-69b1-4755-8f78-9ce3572ea5e3 nodeName:}" failed. No retries permitted until 2025-10-02 14:40:46.623682527 +0000 UTC m=+1197.475536973 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data") pod "glance-default-single-0" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3") : secret "glance-default-single-config-data" not found Oct 02 14:40:45 crc kubenswrapper[4717]: E1002 14:40:45.623604 4717 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-scripts: secret "glance-scripts" not found Oct 02 14:40:45 crc kubenswrapper[4717]: E1002 14:40:45.624074 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts podName:794f8406-69b1-4755-8f78-9ce3572ea5e3 nodeName:}" failed. No retries permitted until 2025-10-02 14:40:46.624033736 +0000 UTC m=+1197.475888182 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts") pod "glance-default-single-0" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3") : secret "glance-scripts" not found Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.980455 4717 generic.go:334] "Generic (PLEG): container finished" podID="0e7523c5-ac50-40c6-abc5-26ffddf86a25" containerID="24706576e27032048f0b01fca69e2fd6925d338b25eea4081688a589797a472a" exitCode=0 Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.980580 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" event={"ID":"0e7523c5-ac50-40c6-abc5-26ffddf86a25","Type":"ContainerDied","Data":"24706576e27032048f0b01fca69e2fd6925d338b25eea4081688a589797a472a"} Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.980647 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" event={"ID":"0e7523c5-ac50-40c6-abc5-26ffddf86a25","Type":"ContainerStarted","Data":"f8f490cf924ee088ce62a4d70d0023e067503024294b3af75164d7e127ce27cc"} Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.980685 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerName="glance-httpd" containerID="cri-o://ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4" gracePeriod=30 Oct 02 14:40:45 crc kubenswrapper[4717]: I1002 14:40:45.980783 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerName="glance-log" containerID="cri-o://50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85" gracePeriod=30 Oct 02 14:40:46 crc kubenswrapper[4717]: E1002 14:40:46.639522 4717 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-default-single-config-data: secret "glance-default-single-config-data" not found Oct 02 14:40:46 crc kubenswrapper[4717]: E1002 14:40:46.640036 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data podName:794f8406-69b1-4755-8f78-9ce3572ea5e3 nodeName:}" failed. No retries permitted until 2025-10-02 14:40:48.640004819 +0000 UTC m=+1199.491859305 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data") pod "glance-default-single-0" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3") : secret "glance-default-single-config-data" not found Oct 02 14:40:46 crc kubenswrapper[4717]: E1002 14:40:46.639575 4717 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-scripts: secret "glance-scripts" not found Oct 02 14:40:46 crc kubenswrapper[4717]: E1002 14:40:46.640105 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts podName:794f8406-69b1-4755-8f78-9ce3572ea5e3 nodeName:}" failed. No retries permitted until 2025-10-02 14:40:48.640092171 +0000 UTC m=+1199.491946657 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts") pod "glance-default-single-0" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3") : secret "glance-scripts" not found Oct 02 14:40:46 crc kubenswrapper[4717]: I1002 14:40:46.993881 4717 generic.go:334] "Generic (PLEG): container finished" podID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerID="50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85" exitCode=143 Oct 02 14:40:46 crc kubenswrapper[4717]: I1002 14:40:46.994218 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"794f8406-69b1-4755-8f78-9ce3572ea5e3","Type":"ContainerDied","Data":"50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85"} Oct 02 14:40:47 crc kubenswrapper[4717]: I1002 14:40:47.370060 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" Oct 02 14:40:47 crc kubenswrapper[4717]: I1002 14:40:47.451047 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q96xk\" (UniqueName: \"kubernetes.io/projected/0e7523c5-ac50-40c6-abc5-26ffddf86a25-kube-api-access-q96xk\") pod \"0e7523c5-ac50-40c6-abc5-26ffddf86a25\" (UID: \"0e7523c5-ac50-40c6-abc5-26ffddf86a25\") " Oct 02 14:40:47 crc kubenswrapper[4717]: I1002 14:40:47.456457 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e7523c5-ac50-40c6-abc5-26ffddf86a25-kube-api-access-q96xk" (OuterVolumeSpecName: "kube-api-access-q96xk") pod "0e7523c5-ac50-40c6-abc5-26ffddf86a25" (UID: "0e7523c5-ac50-40c6-abc5-26ffddf86a25"). InnerVolumeSpecName "kube-api-access-q96xk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:47 crc kubenswrapper[4717]: I1002 14:40:47.552875 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q96xk\" (UniqueName: \"kubernetes.io/projected/0e7523c5-ac50-40c6-abc5-26ffddf86a25-kube-api-access-q96xk\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:48 crc kubenswrapper[4717]: I1002 14:40:48.003459 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" event={"ID":"0e7523c5-ac50-40c6-abc5-26ffddf86a25","Type":"ContainerDied","Data":"f8f490cf924ee088ce62a4d70d0023e067503024294b3af75164d7e127ce27cc"} Oct 02 14:40:48 crc kubenswrapper[4717]: I1002 14:40:48.003502 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8f490cf924ee088ce62a4d70d0023e067503024294b3af75164d7e127ce27cc" Oct 02 14:40:48 crc kubenswrapper[4717]: I1002 14:40:48.003556 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glanced39f-account-delete-gg7xs" Oct 02 14:40:48 crc kubenswrapper[4717]: E1002 14:40:48.668926 4717 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-scripts: secret "glance-scripts" not found Oct 02 14:40:48 crc kubenswrapper[4717]: E1002 14:40:48.668998 4717 secret.go:188] Couldn't get secret glance-kuttl-tests/glance-default-single-config-data: secret "glance-default-single-config-data" not found Oct 02 14:40:48 crc kubenswrapper[4717]: E1002 14:40:48.669021 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts podName:794f8406-69b1-4755-8f78-9ce3572ea5e3 nodeName:}" failed. 
No retries permitted until 2025-10-02 14:40:52.668999795 +0000 UTC m=+1203.520854241 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts") pod "glance-default-single-0" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3") : secret "glance-scripts" not found Oct 02 14:40:48 crc kubenswrapper[4717]: E1002 14:40:48.669075 4717 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data podName:794f8406-69b1-4755-8f78-9ce3572ea5e3 nodeName:}" failed. No retries permitted until 2025-10-02 14:40:52.669054177 +0000 UTC m=+1203.520908703 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data") pod "glance-default-single-0" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3") : secret "glance-default-single-config-data" not found Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.566829 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680427 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts\") pod \"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680487 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-public-tls-certs\") pod \"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680593 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680630 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-internal-tls-certs\") pod \"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680654 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-combined-ca-bundle\") pod \"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680711 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwlrc\" (UniqueName: \"kubernetes.io/projected/794f8406-69b1-4755-8f78-9ce3572ea5e3-kube-api-access-nwlrc\") pod \"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680746 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-httpd-run\") pod 
\"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680802 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-logs\") pod \"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.680830 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data\") pod \"794f8406-69b1-4755-8f78-9ce3572ea5e3\" (UID: \"794f8406-69b1-4755-8f78-9ce3572ea5e3\") " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.681318 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-logs" (OuterVolumeSpecName: "logs") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.681535 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.689264 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts" (OuterVolumeSpecName: "scripts") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.689275 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.697720 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/794f8406-69b1-4755-8f78-9ce3572ea5e3-kube-api-access-nwlrc" (OuterVolumeSpecName: "kube-api-access-nwlrc") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "kube-api-access-nwlrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.700436 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.716319 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data" (OuterVolumeSpecName: "config-data") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.727257 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.739112 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "794f8406-69b1-4755-8f78-9ce3572ea5e3" (UID: "794f8406-69b1-4755-8f78-9ce3572ea5e3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782290 4717 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782640 4717 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/794f8406-69b1-4755-8f78-9ce3572ea5e3-logs\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782656 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782669 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782680 4717 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782720 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782735 4717 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782761 4717 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/794f8406-69b1-4755-8f78-9ce3572ea5e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.782775 4717 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-nwlrc\" (UniqueName: \"kubernetes.io/projected/794f8406-69b1-4755-8f78-9ce3572ea5e3-kube-api-access-nwlrc\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.817271 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.833785 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-ng487"] Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.841635 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-ng487"] Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.847698 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-d39f-account-create-rr5vg"] Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.853496 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glanced39f-account-delete-gg7xs"] Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.858055 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-d39f-account-create-rr5vg"] Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.863142 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glanced39f-account-delete-gg7xs"] Oct 02 14:40:49 crc kubenswrapper[4717]: I1002 14:40:49.884727 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.023559 4717 generic.go:334] "Generic (PLEG): container finished" podID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerID="ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4" exitCode=0 Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.023599 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"794f8406-69b1-4755-8f78-9ce3572ea5e3","Type":"ContainerDied","Data":"ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4"} Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.023654 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.023685 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"794f8406-69b1-4755-8f78-9ce3572ea5e3","Type":"ContainerDied","Data":"ac8ad3c87ba0c3854e5b34dfca39043375eee3bf4454f88325d0064cdb14fb26"} Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.023717 4717 scope.go:117] "RemoveContainer" containerID="ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.057967 4717 scope.go:117] "RemoveContainer" containerID="50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.064066 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.069403 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.074104 4717 scope.go:117] "RemoveContainer" containerID="ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4" Oct 02 14:40:50 crc kubenswrapper[4717]: E1002 14:40:50.075475 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4\": container with ID starting with ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4 not found: ID does not exist" containerID="ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.075514 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4"} err="failed to get container status \"ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4\": rpc error: code = NotFound desc = could not find container \"ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4\": container with ID starting with ef019dfd3005b6bd1114673337cd11cefda545ffb438eb9ed8ca4a0e0d1550d4 not found: ID does not exist" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.075541 4717 scope.go:117] "RemoveContainer" containerID="50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85" Oct 02 14:40:50 crc kubenswrapper[4717]: E1002 14:40:50.075859 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85\": container with ID starting with 50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85 not found: ID does not exist" containerID="50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.075893 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85"} err="failed to get container status \"50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85\": rpc error: code = NotFound desc = could not find container \"50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85\": container with ID starting with 50ab22d3d0cb896aa9e2019cededcd15be0273c335ddc3e5daaebeab11a50e85 not found: ID does not 
exist" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.855896 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04183c90-0bd9-4aaf-ad60-abd4897a73f5" path="/var/lib/kubelet/pods/04183c90-0bd9-4aaf-ad60-abd4897a73f5/volumes" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.857302 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e7523c5-ac50-40c6-abc5-26ffddf86a25" path="/var/lib/kubelet/pods/0e7523c5-ac50-40c6-abc5-26ffddf86a25/volumes" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.858711 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" path="/var/lib/kubelet/pods/794f8406-69b1-4755-8f78-9ce3572ea5e3/volumes" Oct 02 14:40:50 crc kubenswrapper[4717]: I1002 14:40:50.861043 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9854dc2-3cbd-4ad9-a949-a3cba007ad07" path="/var/lib/kubelet/pods/f9854dc2-3cbd-4ad9-a949-a3cba007ad07/volumes" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.685343 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-ml46j"] Oct 02 14:40:51 crc kubenswrapper[4717]: E1002 14:40:51.686205 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerName="glance-log" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.686236 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerName="glance-log" Oct 02 14:40:51 crc kubenswrapper[4717]: E1002 14:40:51.686257 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e7523c5-ac50-40c6-abc5-26ffddf86a25" containerName="mariadb-account-delete" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.686268 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e7523c5-ac50-40c6-abc5-26ffddf86a25" containerName="mariadb-account-delete" Oct 02 14:40:51 crc kubenswrapper[4717]: E1002 14:40:51.686290 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerName="glance-httpd" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.686303 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerName="glance-httpd" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.686519 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerName="glance-httpd" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.686543 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e7523c5-ac50-40c6-abc5-26ffddf86a25" containerName="mariadb-account-delete" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.686561 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="794f8406-69b1-4755-8f78-9ce3572ea5e3" containerName="glance-log" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.687202 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-ml46j" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.695620 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-ml46j"] Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.813558 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkppd\" (UniqueName: \"kubernetes.io/projected/c56ce587-1ba8-401e-9238-30f2216e0aee-kube-api-access-zkppd\") pod \"glance-db-create-ml46j\" (UID: \"c56ce587-1ba8-401e-9238-30f2216e0aee\") " pod="glance-kuttl-tests/glance-db-create-ml46j" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.915651 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkppd\" (UniqueName: \"kubernetes.io/projected/c56ce587-1ba8-401e-9238-30f2216e0aee-kube-api-access-zkppd\") pod \"glance-db-create-ml46j\" (UID: \"c56ce587-1ba8-401e-9238-30f2216e0aee\") " pod="glance-kuttl-tests/glance-db-create-ml46j" Oct 02 14:40:51 crc kubenswrapper[4717]: I1002 14:40:51.941109 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkppd\" (UniqueName: \"kubernetes.io/projected/c56ce587-1ba8-401e-9238-30f2216e0aee-kube-api-access-zkppd\") pod \"glance-db-create-ml46j\" (UID: \"c56ce587-1ba8-401e-9238-30f2216e0aee\") " pod="glance-kuttl-tests/glance-db-create-ml46j" Oct 02 14:40:52 crc kubenswrapper[4717]: I1002 14:40:52.004903 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-ml46j" Oct 02 14:40:52 crc kubenswrapper[4717]: I1002 14:40:52.443381 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-ml46j"] Oct 02 14:40:53 crc kubenswrapper[4717]: I1002 14:40:53.054994 4717 generic.go:334] "Generic (PLEG): container finished" podID="c56ce587-1ba8-401e-9238-30f2216e0aee" containerID="5d90516f3c568f0109ffce2790fb5379ae9ce82b61f6f6fe0143ee7b84d8e340" exitCode=0 Oct 02 14:40:53 crc kubenswrapper[4717]: I1002 14:40:53.055068 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-ml46j" event={"ID":"c56ce587-1ba8-401e-9238-30f2216e0aee","Type":"ContainerDied","Data":"5d90516f3c568f0109ffce2790fb5379ae9ce82b61f6f6fe0143ee7b84d8e340"} Oct 02 14:40:53 crc kubenswrapper[4717]: I1002 14:40:53.055109 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-ml46j" event={"ID":"c56ce587-1ba8-401e-9238-30f2216e0aee","Type":"ContainerStarted","Data":"7d51ba4b82b83c62e13ef0ae51aff32c6c682e0a20d074328d8d6e7011c3b65a"} Oct 02 14:40:54 crc kubenswrapper[4717]: I1002 14:40:54.390102 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-ml46j" Oct 02 14:40:54 crc kubenswrapper[4717]: I1002 14:40:54.450928 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkppd\" (UniqueName: \"kubernetes.io/projected/c56ce587-1ba8-401e-9238-30f2216e0aee-kube-api-access-zkppd\") pod \"c56ce587-1ba8-401e-9238-30f2216e0aee\" (UID: \"c56ce587-1ba8-401e-9238-30f2216e0aee\") " Oct 02 14:40:54 crc kubenswrapper[4717]: I1002 14:40:54.477105 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c56ce587-1ba8-401e-9238-30f2216e0aee-kube-api-access-zkppd" (OuterVolumeSpecName: "kube-api-access-zkppd") pod "c56ce587-1ba8-401e-9238-30f2216e0aee" (UID: "c56ce587-1ba8-401e-9238-30f2216e0aee"). InnerVolumeSpecName "kube-api-access-zkppd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:40:54 crc kubenswrapper[4717]: I1002 14:40:54.552331 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkppd\" (UniqueName: \"kubernetes.io/projected/c56ce587-1ba8-401e-9238-30f2216e0aee-kube-api-access-zkppd\") on node \"crc\" DevicePath \"\"" Oct 02 14:40:55 crc kubenswrapper[4717]: I1002 14:40:55.077855 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-ml46j" event={"ID":"c56ce587-1ba8-401e-9238-30f2216e0aee","Type":"ContainerDied","Data":"7d51ba4b82b83c62e13ef0ae51aff32c6c682e0a20d074328d8d6e7011c3b65a"} Oct 02 14:40:55 crc kubenswrapper[4717]: I1002 14:40:55.077920 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d51ba4b82b83c62e13ef0ae51aff32c6c682e0a20d074328d8d6e7011c3b65a" Oct 02 14:40:55 crc kubenswrapper[4717]: I1002 14:40:55.077978 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-ml46j" Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.707953 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-7b32-account-create-s29lb"] Oct 02 14:41:01 crc kubenswrapper[4717]: E1002 14:41:01.709010 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c56ce587-1ba8-401e-9238-30f2216e0aee" containerName="mariadb-database-create" Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.709034 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="c56ce587-1ba8-401e-9238-30f2216e0aee" containerName="mariadb-database-create" Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.709250 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="c56ce587-1ba8-401e-9238-30f2216e0aee" containerName="mariadb-database-create" Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.710026 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.712120 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.713502 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-7b32-account-create-s29lb"] Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.771135 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p276n\" (UniqueName: \"kubernetes.io/projected/9556b356-4c0a-4d9d-9ff8-c949f3926434-kube-api-access-p276n\") pod \"glance-7b32-account-create-s29lb\" (UID: \"9556b356-4c0a-4d9d-9ff8-c949f3926434\") " pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.871880 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p276n\" (UniqueName: \"kubernetes.io/projected/9556b356-4c0a-4d9d-9ff8-c949f3926434-kube-api-access-p276n\") pod \"glance-7b32-account-create-s29lb\" (UID: \"9556b356-4c0a-4d9d-9ff8-c949f3926434\") " pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" Oct 02 14:41:01 crc kubenswrapper[4717]: I1002 14:41:01.890966 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p276n\" (UniqueName: \"kubernetes.io/projected/9556b356-4c0a-4d9d-9ff8-c949f3926434-kube-api-access-p276n\") pod \"glance-7b32-account-create-s29lb\" (UID: \"9556b356-4c0a-4d9d-9ff8-c949f3926434\") " pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" Oct 02 14:41:02 crc kubenswrapper[4717]: I1002 14:41:02.039533 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" Oct 02 14:41:02 crc kubenswrapper[4717]: I1002 14:41:02.485720 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-7b32-account-create-s29lb"] Oct 02 14:41:03 crc kubenswrapper[4717]: I1002 14:41:03.154559 4717 generic.go:334] "Generic (PLEG): container finished" podID="9556b356-4c0a-4d9d-9ff8-c949f3926434" containerID="322d84879e25aea6243774b05bb394d9da0ffc318606cac21f88fd34e3e39fa8" exitCode=0 Oct 02 14:41:03 crc kubenswrapper[4717]: I1002 14:41:03.154608 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" event={"ID":"9556b356-4c0a-4d9d-9ff8-c949f3926434","Type":"ContainerDied","Data":"322d84879e25aea6243774b05bb394d9da0ffc318606cac21f88fd34e3e39fa8"} Oct 02 14:41:03 crc kubenswrapper[4717]: I1002 14:41:03.154637 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" event={"ID":"9556b356-4c0a-4d9d-9ff8-c949f3926434","Type":"ContainerStarted","Data":"001061299e2cb2d5e2315eddbde0af60b050ffa484fc7d08310834c623537703"} Oct 02 14:41:04 crc kubenswrapper[4717]: I1002 14:41:04.471154 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" Oct 02 14:41:04 crc kubenswrapper[4717]: I1002 14:41:04.510898 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p276n\" (UniqueName: \"kubernetes.io/projected/9556b356-4c0a-4d9d-9ff8-c949f3926434-kube-api-access-p276n\") pod \"9556b356-4c0a-4d9d-9ff8-c949f3926434\" (UID: \"9556b356-4c0a-4d9d-9ff8-c949f3926434\") " Oct 02 14:41:04 crc kubenswrapper[4717]: I1002 14:41:04.541127 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9556b356-4c0a-4d9d-9ff8-c949f3926434-kube-api-access-p276n" (OuterVolumeSpecName: "kube-api-access-p276n") pod "9556b356-4c0a-4d9d-9ff8-c949f3926434" (UID: "9556b356-4c0a-4d9d-9ff8-c949f3926434"). InnerVolumeSpecName "kube-api-access-p276n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:41:04 crc kubenswrapper[4717]: I1002 14:41:04.613150 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p276n\" (UniqueName: \"kubernetes.io/projected/9556b356-4c0a-4d9d-9ff8-c949f3926434-kube-api-access-p276n\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:05 crc kubenswrapper[4717]: I1002 14:41:05.172090 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" event={"ID":"9556b356-4c0a-4d9d-9ff8-c949f3926434","Type":"ContainerDied","Data":"001061299e2cb2d5e2315eddbde0af60b050ffa484fc7d08310834c623537703"} Oct 02 14:41:05 crc kubenswrapper[4717]: I1002 14:41:05.172145 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="001061299e2cb2d5e2315eddbde0af60b050ffa484fc7d08310834c623537703" Oct 02 14:41:05 crc kubenswrapper[4717]: I1002 14:41:05.172158 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-7b32-account-create-s29lb" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.760803 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-7q558"] Oct 02 14:41:06 crc kubenswrapper[4717]: E1002 14:41:06.761548 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9556b356-4c0a-4d9d-9ff8-c949f3926434" containerName="mariadb-account-create" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.761564 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9556b356-4c0a-4d9d-9ff8-c949f3926434" containerName="mariadb-account-create" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.761729 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="9556b356-4c0a-4d9d-9ff8-c949f3926434" containerName="mariadb-account-create" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.762275 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.765868 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.766184 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-l4vwn" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.773452 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-7q558"] Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.847181 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhzr4\" (UniqueName: \"kubernetes.io/projected/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-kube-api-access-jhzr4\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.847544 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-db-sync-config-data\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.847957 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-config-data\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.948913 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-db-sync-config-data\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.949245 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-config-data\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.949946 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhzr4\" (UniqueName: \"kubernetes.io/projected/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-kube-api-access-jhzr4\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.954578 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-config-data\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.966855 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-db-sync-config-data\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:06 crc kubenswrapper[4717]: I1002 14:41:06.969527 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhzr4\" (UniqueName: \"kubernetes.io/projected/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-kube-api-access-jhzr4\") pod \"glance-db-sync-7q558\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:07 crc kubenswrapper[4717]: I1002 14:41:07.083439 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:07 crc kubenswrapper[4717]: I1002 14:41:07.509111 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-7q558"] Oct 02 14:41:08 crc kubenswrapper[4717]: I1002 14:41:08.202823 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-7q558" event={"ID":"b6b08c0c-8c1e-43aa-8457-5ea7f6641148","Type":"ContainerStarted","Data":"7d58c622862576531efaca91b645b90751c39f211c96f1d8c586fb31e9300999"} Oct 02 14:41:08 crc kubenswrapper[4717]: I1002 14:41:08.203219 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-7q558" event={"ID":"b6b08c0c-8c1e-43aa-8457-5ea7f6641148","Type":"ContainerStarted","Data":"6e354a6e52351b003016b711867dc4b0d4723dd8dac8baa8b41e3ed9d4bd8d17"} Oct 02 14:41:08 crc kubenswrapper[4717]: I1002 14:41:08.220955 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-7q558" podStartSLOduration=2.220910224 podStartE2EDuration="2.220910224s" podCreationTimestamp="2025-10-02 14:41:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:41:08.218741966 +0000 UTC m=+1219.070596412" watchObservedRunningTime="2025-10-02 14:41:08.220910224 +0000 UTC m=+1219.072764660" Oct 02 14:41:11 crc kubenswrapper[4717]: I1002 14:41:11.229770 4717 generic.go:334] "Generic (PLEG): container finished" podID="b6b08c0c-8c1e-43aa-8457-5ea7f6641148" containerID="7d58c622862576531efaca91b645b90751c39f211c96f1d8c586fb31e9300999" exitCode=0 Oct 02 14:41:11 crc kubenswrapper[4717]: I1002 14:41:11.229964 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-7q558" event={"ID":"b6b08c0c-8c1e-43aa-8457-5ea7f6641148","Type":"ContainerDied","Data":"7d58c622862576531efaca91b645b90751c39f211c96f1d8c586fb31e9300999"} Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.522713 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.646630 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-config-data\") pod \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.647080 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhzr4\" (UniqueName: \"kubernetes.io/projected/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-kube-api-access-jhzr4\") pod \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.647125 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-db-sync-config-data\") pod \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\" (UID: \"b6b08c0c-8c1e-43aa-8457-5ea7f6641148\") " Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.652629 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-kube-api-access-jhzr4" (OuterVolumeSpecName: "kube-api-access-jhzr4") pod "b6b08c0c-8c1e-43aa-8457-5ea7f6641148" (UID: "b6b08c0c-8c1e-43aa-8457-5ea7f6641148"). InnerVolumeSpecName "kube-api-access-jhzr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.654161 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b6b08c0c-8c1e-43aa-8457-5ea7f6641148" (UID: "b6b08c0c-8c1e-43aa-8457-5ea7f6641148"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.684368 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-config-data" (OuterVolumeSpecName: "config-data") pod "b6b08c0c-8c1e-43aa-8457-5ea7f6641148" (UID: "b6b08c0c-8c1e-43aa-8457-5ea7f6641148"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.748547 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.748584 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhzr4\" (UniqueName: \"kubernetes.io/projected/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-kube-api-access-jhzr4\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:12 crc kubenswrapper[4717]: I1002 14:41:12.748593 4717 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b6b08c0c-8c1e-43aa-8457-5ea7f6641148-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:13 crc kubenswrapper[4717]: I1002 14:41:13.256127 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-7q558" event={"ID":"b6b08c0c-8c1e-43aa-8457-5ea7f6641148","Type":"ContainerDied","Data":"6e354a6e52351b003016b711867dc4b0d4723dd8dac8baa8b41e3ed9d4bd8d17"} Oct 02 14:41:13 crc kubenswrapper[4717]: I1002 14:41:13.256196 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e354a6e52351b003016b711867dc4b0d4723dd8dac8baa8b41e3ed9d4bd8d17" Oct 02 14:41:13 crc kubenswrapper[4717]: I1002 14:41:13.256230 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-7q558" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.552657 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 02 14:41:14 crc kubenswrapper[4717]: E1002 14:41:14.553397 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6b08c0c-8c1e-43aa-8457-5ea7f6641148" containerName="glance-db-sync" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.553420 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6b08c0c-8c1e-43aa-8457-5ea7f6641148" containerName="glance-db-sync" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.553688 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6b08c0c-8c1e-43aa-8457-5ea7f6641148" containerName="glance-db-sync" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.556008 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.561383 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.563488 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.563862 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-external-config-data" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.563861 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-l4vwn" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675459 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675528 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675554 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675583 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01f9e583-3cac-4016-a4a3-3b5107a0c990-logs\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675615 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675652 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675672 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/01f9e583-3cac-4016-a4a3-3b5107a0c990-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675704 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01f9e583-3cac-4016-a4a3-3b5107a0c990-config-data\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675744 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01f9e583-3cac-4016-a4a3-3b5107a0c990-scripts\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675776 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpt2t\" (UniqueName: \"kubernetes.io/projected/01f9e583-3cac-4016-a4a3-3b5107a0c990-kube-api-access-rpt2t\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675802 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675821 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-run\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675848 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-dev\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.675875 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-sys\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.752783 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.755220 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.758546 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-internal-config-data" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.772488 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.777513 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01f9e583-3cac-4016-a4a3-3b5107a0c990-logs\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.777729 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778039 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778166 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778202 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/01f9e583-3cac-4016-a4a3-3b5107a0c990-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778422 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01f9e583-3cac-4016-a4a3-3b5107a0c990-config-data\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778561 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01f9e583-3cac-4016-a4a3-3b5107a0c990-scripts\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778675 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpt2t\" (UniqueName: \"kubernetes.io/projected/01f9e583-3cac-4016-a4a3-3b5107a0c990-kube-api-access-rpt2t\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " 
pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778777 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778846 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/01f9e583-3cac-4016-a4a3-3b5107a0c990-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778839 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01f9e583-3cac-4016-a4a3-3b5107a0c990-logs\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.778386 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") device mount path \"/mnt/openstack/pv11\"" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779018 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-run\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779115 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-dev\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779271 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-sys\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779385 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779494 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc 
kubenswrapper[4717]: I1002 14:41:14.779580 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779659 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779112 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") device mount path \"/mnt/openstack/pv07\"" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779617 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-sys\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779147 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-dev\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779126 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-run\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.779978 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.780039 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/01f9e583-3cac-4016-a4a3-3b5107a0c990-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.783368 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01f9e583-3cac-4016-a4a3-3b5107a0c990-config-data\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.791637 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01f9e583-3cac-4016-a4a3-3b5107a0c990-scripts\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.803625 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.804772 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpt2t\" (UniqueName: \"kubernetes.io/projected/01f9e583-3cac-4016-a4a3-3b5107a0c990-kube-api-access-rpt2t\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.821331 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"01f9e583-3cac-4016-a4a3-3b5107a0c990\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.879953 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881269 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881341 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881372 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-run\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881495 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881525 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-sys\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " 
pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881584 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881628 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcxq5\" (UniqueName: \"kubernetes.io/projected/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-kube-api-access-lcxq5\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881684 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881711 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-dev\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881752 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881777 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.881817 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.882056 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.882095 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-logs\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983618 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983664 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-sys\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983699 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983728 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcxq5\" (UniqueName: \"kubernetes.io/projected/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-kube-api-access-lcxq5\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983771 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983790 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-dev\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983808 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983826 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983847 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: 
\"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983870 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983884 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-logs\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983900 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983945 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983919 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983971 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") device mount path \"/mnt/openstack/pv08\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983962 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-run\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.983799 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-sys\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.984470 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-run\") pod 
\"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.984516 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.984538 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-dev\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.984574 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.984717 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.984755 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.984792 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") device mount path \"/mnt/openstack/pv09\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:14 crc kubenswrapper[4717]: I1002 14:41:14.985276 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-logs\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:15 crc kubenswrapper[4717]: I1002 14:41:15.007004 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:15 crc kubenswrapper[4717]: I1002 14:41:15.008187 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " 
pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:15 crc kubenswrapper[4717]: I1002 14:41:15.022626 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcxq5\" (UniqueName: \"kubernetes.io/projected/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-kube-api-access-lcxq5\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:15 crc kubenswrapper[4717]: I1002 14:41:15.038140 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:15 crc kubenswrapper[4717]: I1002 14:41:15.042031 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:15 crc kubenswrapper[4717]: I1002 14:41:15.070747 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:15 crc kubenswrapper[4717]: I1002 14:41:15.379918 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:15 crc kubenswrapper[4717]: I1002 14:41:15.484516 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 02 14:41:15 crc kubenswrapper[4717]: W1002 14:41:15.492225 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01f9e583_3cac_4016_a4a3_3b5107a0c990.slice/crio-12aa5bbe45c9bb7c70cea8156b0a46178b86f1e5c8c4c3fb91c4203ced9a9074 WatchSource:0}: Error finding container 12aa5bbe45c9bb7c70cea8156b0a46178b86f1e5c8c4c3fb91c4203ced9a9074: Status 404 returned error can't find the container with id 12aa5bbe45c9bb7c70cea8156b0a46178b86f1e5c8c4c3fb91c4203ced9a9074 Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.085321 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.286024 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"01f9e583-3cac-4016-a4a3-3b5107a0c990","Type":"ContainerStarted","Data":"61bcf2307c48b74833e506d43364aced8ff99917ef9cafd25a61df364e0d42df"} Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.286120 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"01f9e583-3cac-4016-a4a3-3b5107a0c990","Type":"ContainerStarted","Data":"ec7f438764f138a902896ed9701d4bac7ba220364c300db7c75fc51e10744dc3"} Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.286225 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"01f9e583-3cac-4016-a4a3-3b5107a0c990","Type":"ContainerStarted","Data":"ba8195fdc4768bacfbc57d965a84f979dc03d88ad03da28f9c004893acc4a51f"} Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.286274 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"01f9e583-3cac-4016-a4a3-3b5107a0c990","Type":"ContainerStarted","Data":"12aa5bbe45c9bb7c70cea8156b0a46178b86f1e5c8c4c3fb91c4203ced9a9074"} Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.292480 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"7756fdda-f7c4-4e18-89ff-8e56c9ad211a","Type":"ContainerStarted","Data":"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd"} Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.292538 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"7756fdda-f7c4-4e18-89ff-8e56c9ad211a","Type":"ContainerStarted","Data":"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb"} Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.292556 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"7756fdda-f7c4-4e18-89ff-8e56c9ad211a","Type":"ContainerStarted","Data":"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2"} Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.292567 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"7756fdda-f7c4-4e18-89ff-8e56c9ad211a","Type":"ContainerStarted","Data":"352a7ac24953a905ee1dd21b1c149e7b35023b670f7950f1ed48206f14650e94"} Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.313827 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-external-api-0" podStartSLOduration=2.313802846 podStartE2EDuration="2.313802846s" podCreationTimestamp="2025-10-02 14:41:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:41:16.312438109 +0000 UTC m=+1227.164292565" watchObservedRunningTime="2025-10-02 14:41:16.313802846 +0000 UTC m=+1227.165657282" Oct 02 14:41:16 crc kubenswrapper[4717]: I1002 14:41:16.339060 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.339041384 podStartE2EDuration="3.339041384s" podCreationTimestamp="2025-10-02 14:41:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:41:16.337239826 +0000 UTC m=+1227.189094262" watchObservedRunningTime="2025-10-02 14:41:16.339041384 +0000 UTC m=+1227.190895830" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.300290 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-log" containerID="cri-o://d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2" gracePeriod=30 Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.300763 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-httpd" containerID="cri-o://3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb" gracePeriod=30 Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.300751 4717 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-api" containerID="cri-o://d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd" gracePeriod=30 Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.766087 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935462 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-nvme\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935502 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935532 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-dev\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935571 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-config-data\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935611 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-lib-modules\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935628 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-var-locks-brick\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935658 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-sys\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935678 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-run\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935710 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcxq5\" (UniqueName: \"kubernetes.io/projected/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-kube-api-access-lcxq5\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 
14:41:17.935763 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-logs\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935810 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-httpd-run\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935831 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-scripts\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935853 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-iscsi\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.935876 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\" (UID: \"7756fdda-f7c4-4e18-89ff-8e56c9ad211a\") " Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.936466 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.936684 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-run" (OuterVolumeSpecName: "run") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.936718 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-dev" (OuterVolumeSpecName: "dev") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.936740 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "var-locks-brick". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.936769 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.936783 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-sys" (OuterVolumeSpecName: "sys") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.936924 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.937284 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.937348 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-logs" (OuterVolumeSpecName: "logs") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.947561 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.949104 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance-cache") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.952120 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-kube-api-access-lcxq5" (OuterVolumeSpecName: "kube-api-access-lcxq5") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "kube-api-access-lcxq5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:41:17 crc kubenswrapper[4717]: I1002 14:41:17.953037 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-scripts" (OuterVolumeSpecName: "scripts") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.024952 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-config-data" (OuterVolumeSpecName: "config-data") pod "7756fdda-f7c4-4e18-89ff-8e56c9ad211a" (UID: "7756fdda-f7c4-4e18-89ff-8e56c9ad211a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037277 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcxq5\" (UniqueName: \"kubernetes.io/projected/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-kube-api-access-lcxq5\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037306 4717 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-logs\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037315 4717 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037324 4717 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-scripts\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037332 4717 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037351 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037359 4717 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037371 4717 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037379 4717 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-dev\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037387 4717 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-config-data\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037395 4717 
reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037403 4717 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037410 4717 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-sys\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.037417 4717 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/7756fdda-f7c4-4e18-89ff-8e56c9ad211a-run\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.065202 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.068182 4717 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.138978 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.139016 4717 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311808 4717 generic.go:334] "Generic (PLEG): container finished" podID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerID="d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd" exitCode=143 Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311852 4717 generic.go:334] "Generic (PLEG): container finished" podID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerID="3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb" exitCode=0 Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311862 4717 generic.go:334] "Generic (PLEG): container finished" podID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerID="d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2" exitCode=143 Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311863 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"7756fdda-f7c4-4e18-89ff-8e56c9ad211a","Type":"ContainerDied","Data":"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd"} Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311887 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311918 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"7756fdda-f7c4-4e18-89ff-8e56c9ad211a","Type":"ContainerDied","Data":"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb"} Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311947 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"7756fdda-f7c4-4e18-89ff-8e56c9ad211a","Type":"ContainerDied","Data":"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2"} Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311952 4717 scope.go:117] "RemoveContainer" containerID="d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.311959 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"7756fdda-f7c4-4e18-89ff-8e56c9ad211a","Type":"ContainerDied","Data":"352a7ac24953a905ee1dd21b1c149e7b35023b670f7950f1ed48206f14650e94"} Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.339205 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.348112 4717 scope.go:117] "RemoveContainer" containerID="3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.351879 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.366242 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:18 crc kubenswrapper[4717]: E1002 14:41:18.366490 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-httpd" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.366507 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-httpd" Oct 02 14:41:18 crc kubenswrapper[4717]: E1002 14:41:18.366521 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-api" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.366527 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-api" Oct 02 14:41:18 crc kubenswrapper[4717]: E1002 14:41:18.366551 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-log" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.366557 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-log" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.366666 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-httpd" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.366686 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-log" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.366695 4717 
memory_manager.go:354] "RemoveStaleState removing state" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" containerName="glance-api" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.371326 4717 scope.go:117] "RemoveContainer" containerID="d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.371661 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.375362 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-internal-config-data" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.391775 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.401455 4717 scope.go:117] "RemoveContainer" containerID="d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd" Oct 02 14:41:18 crc kubenswrapper[4717]: E1002 14:41:18.401901 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd\": container with ID starting with d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd not found: ID does not exist" containerID="d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.401940 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd"} err="failed to get container status \"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd\": rpc error: code = NotFound desc = could not find container \"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd\": container with ID starting with d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.401962 4717 scope.go:117] "RemoveContainer" containerID="3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb" Oct 02 14:41:18 crc kubenswrapper[4717]: E1002 14:41:18.402430 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb\": container with ID starting with 3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb not found: ID does not exist" containerID="3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.402540 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb"} err="failed to get container status \"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb\": rpc error: code = NotFound desc = could not find container \"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb\": container with ID starting with 3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.402577 4717 scope.go:117] "RemoveContainer" containerID="d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2" Oct 02 14:41:18 crc 
kubenswrapper[4717]: E1002 14:41:18.404857 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2\": container with ID starting with d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2 not found: ID does not exist" containerID="d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.404919 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2"} err="failed to get container status \"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2\": rpc error: code = NotFound desc = could not find container \"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2\": container with ID starting with d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2 not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.404963 4717 scope.go:117] "RemoveContainer" containerID="d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.405207 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd"} err="failed to get container status \"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd\": rpc error: code = NotFound desc = could not find container \"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd\": container with ID starting with d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.405226 4717 scope.go:117] "RemoveContainer" containerID="3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.405457 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb"} err="failed to get container status \"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb\": rpc error: code = NotFound desc = could not find container \"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb\": container with ID starting with 3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.405478 4717 scope.go:117] "RemoveContainer" containerID="d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.405661 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2"} err="failed to get container status \"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2\": rpc error: code = NotFound desc = could not find container \"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2\": container with ID starting with d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2 not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.405681 4717 scope.go:117] "RemoveContainer" containerID="d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd" Oct 02 14:41:18 crc 
kubenswrapper[4717]: I1002 14:41:18.406251 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd"} err="failed to get container status \"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd\": rpc error: code = NotFound desc = could not find container \"d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd\": container with ID starting with d1f855d426b0694d370c89ba0a0a1891b598f9e0655dd76d8333cd57800e37fd not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.406279 4717 scope.go:117] "RemoveContainer" containerID="3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.406534 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb"} err="failed to get container status \"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb\": rpc error: code = NotFound desc = could not find container \"3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb\": container with ID starting with 3327151d0468acc86a714323e99c22a05f1a8cc564db3186b9ae5296558024cb not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.406555 4717 scope.go:117] "RemoveContainer" containerID="d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.407004 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2"} err="failed to get container status \"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2\": rpc error: code = NotFound desc = could not find container \"d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2\": container with ID starting with d4afc617394c169854199bf5ce7e061c3f1ef4163ad8c57fc875bd061ff009b2 not found: ID does not exist" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.545698 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.545749 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-run\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.545792 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.545820 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: 
\"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-dev\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.545859 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-sys\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.545881 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7e7b28c-2adf-4d68-a97c-683603dec324-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.545899 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.545915 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7e7b28c-2adf-4d68-a97c-683603dec324-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.546041 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7e7b28c-2adf-4d68-a97c-683603dec324-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.546094 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.546256 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.546332 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v54n\" (UniqueName: \"kubernetes.io/projected/f7e7b28c-2adf-4d68-a97c-683603dec324-kube-api-access-9v54n\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.546403 4717 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7e7b28c-2adf-4d68-a97c-683603dec324-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.546448 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.647716 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.647829 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.647863 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-run\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.647910 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-run\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.647977 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.647854 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648024 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-dev\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648072 4717 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648092 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-sys\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648077 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-dev\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648124 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7e7b28c-2adf-4d68-a97c-683603dec324-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648003 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") device mount path \"/mnt/openstack/pv08\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648185 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648306 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-sys\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648347 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7e7b28c-2adf-4d68-a97c-683603dec324-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648415 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7e7b28c-2adf-4d68-a97c-683603dec324-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648353 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648480 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648537 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648585 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v54n\" (UniqueName: \"kubernetes.io/projected/f7e7b28c-2adf-4d68-a97c-683603dec324-kube-api-access-9v54n\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648632 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7e7b28c-2adf-4d68-a97c-683603dec324-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648693 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/f7e7b28c-2adf-4d68-a97c-683603dec324-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.648703 4717 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") device mount path \"/mnt/openstack/pv09\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.649036 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7e7b28c-2adf-4d68-a97c-683603dec324-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.649127 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7e7b28c-2adf-4d68-a97c-683603dec324-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.653526 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f7e7b28c-2adf-4d68-a97c-683603dec324-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.658597 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7e7b28c-2adf-4d68-a97c-683603dec324-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.669416 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v54n\" (UniqueName: \"kubernetes.io/projected/f7e7b28c-2adf-4d68-a97c-683603dec324-kube-api-access-9v54n\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.673685 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.673855 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7e7b28c-2adf-4d68-a97c-683603dec324\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.695369 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:18 crc kubenswrapper[4717]: I1002 14:41:18.857350 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7756fdda-f7c4-4e18-89ff-8e56c9ad211a" path="/var/lib/kubelet/pods/7756fdda-f7c4-4e18-89ff-8e56c9ad211a/volumes" Oct 02 14:41:19 crc kubenswrapper[4717]: I1002 14:41:19.121804 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 02 14:41:19 crc kubenswrapper[4717]: I1002 14:41:19.323126 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7e7b28c-2adf-4d68-a97c-683603dec324","Type":"ContainerStarted","Data":"843f9d8cec8fc6efaf17add9ae17d2ddeb7a1f8ef71f4ad47f13cc5196726096"} Oct 02 14:41:19 crc kubenswrapper[4717]: I1002 14:41:19.323171 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7e7b28c-2adf-4d68-a97c-683603dec324","Type":"ContainerStarted","Data":"53842e67c14c115bb6c9fd7e7f14d18f6e3c9bf0ab052a8e88f1f41d4b8a0af5"} Oct 02 14:41:20 crc kubenswrapper[4717]: I1002 14:41:20.334606 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7e7b28c-2adf-4d68-a97c-683603dec324","Type":"ContainerStarted","Data":"410e7ec8078651753b4a5057b521986ee44428dc1d815ec58150cb1d4c8672fe"} Oct 02 14:41:20 crc kubenswrapper[4717]: I1002 14:41:20.335080 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7e7b28c-2adf-4d68-a97c-683603dec324","Type":"ContainerStarted","Data":"0edec90537f4505099de929df24148eb33c454ec3d0987c08bfa899660c8183b"} Oct 02 14:41:20 crc kubenswrapper[4717]: I1002 14:41:20.357294 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.3572713260000002 podStartE2EDuration="2.357271326s" podCreationTimestamp="2025-10-02 14:41:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:41:20.355072367 +0000 UTC m=+1231.206926823" watchObservedRunningTime="2025-10-02 14:41:20.357271326 +0000 UTC m=+1231.209125772" Oct 02 14:41:24 crc kubenswrapper[4717]: I1002 14:41:24.880827 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:24 crc kubenswrapper[4717]: I1002 14:41:24.881433 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:24 crc kubenswrapper[4717]: I1002 14:41:24.881467 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:24 crc kubenswrapper[4717]: I1002 14:41:24.933006 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:24 crc kubenswrapper[4717]: I1002 14:41:24.948820 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:25 crc kubenswrapper[4717]: I1002 14:41:25.036967 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 
02 14:41:25 crc kubenswrapper[4717]: I1002 14:41:25.384352 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:25 crc kubenswrapper[4717]: I1002 14:41:25.384425 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:25 crc kubenswrapper[4717]: I1002 14:41:25.384441 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:25 crc kubenswrapper[4717]: I1002 14:41:25.399657 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:25 crc kubenswrapper[4717]: I1002 14:41:25.402439 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:25 crc kubenswrapper[4717]: I1002 14:41:25.407507 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 02 14:41:28 crc kubenswrapper[4717]: I1002 14:41:28.695686 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:28 crc kubenswrapper[4717]: I1002 14:41:28.696019 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:28 crc kubenswrapper[4717]: I1002 14:41:28.696034 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:28 crc kubenswrapper[4717]: I1002 14:41:28.729643 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:28 crc kubenswrapper[4717]: I1002 14:41:28.729719 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:28 crc kubenswrapper[4717]: I1002 14:41:28.750841 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:29 crc kubenswrapper[4717]: I1002 14:41:29.410215 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:29 crc kubenswrapper[4717]: I1002 14:41:29.410269 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:29 crc kubenswrapper[4717]: I1002 14:41:29.410283 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:29 crc kubenswrapper[4717]: I1002 14:41:29.422012 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:29 crc kubenswrapper[4717]: I1002 14:41:29.422620 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:29 crc kubenswrapper[4717]: I1002 14:41:29.425098 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 02 14:41:55 crc kubenswrapper[4717]: I1002 14:41:55.880215 4717 scope.go:117] "RemoveContainer" 
containerID="27cec4b762e71320a4e2534eba7fb992c877dd950ca1cd4efcc56e6e955ca017" Oct 02 14:41:55 crc kubenswrapper[4717]: I1002 14:41:55.903792 4717 scope.go:117] "RemoveContainer" containerID="f018aab85719322d29ed1bdf3fd87767004a5c70dacfe216166fdb7f3b6a28f4" Oct 02 14:41:55 crc kubenswrapper[4717]: I1002 14:41:55.930926 4717 scope.go:117] "RemoveContainer" containerID="4f18a0078d55f79d49beb022e08a01817ed4f01c3889fd19a031832083979bb7" Oct 02 14:42:18 crc kubenswrapper[4717]: I1002 14:42:18.621018 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:42:18 crc kubenswrapper[4717]: I1002 14:42:18.622219 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:42:48 crc kubenswrapper[4717]: I1002 14:42:48.620607 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:42:48 crc kubenswrapper[4717]: I1002 14:42:48.621839 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:43:18 crc kubenswrapper[4717]: I1002 14:43:18.619989 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:43:18 crc kubenswrapper[4717]: I1002 14:43:18.620961 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:43:18 crc kubenswrapper[4717]: I1002 14:43:18.621017 4717 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:43:18 crc kubenswrapper[4717]: I1002 14:43:18.621804 4717 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f4210ad73a8a6d0d88db04fcb399f0863d41d8468e02867425cd0ab6020cf084"} pod="openshift-machine-config-operator/machine-config-daemon-sk55f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 14:43:18 crc kubenswrapper[4717]: I1002 14:43:18.621869 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" 
podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" containerID="cri-o://f4210ad73a8a6d0d88db04fcb399f0863d41d8468e02867425cd0ab6020cf084" gracePeriod=600 Oct 02 14:43:19 crc kubenswrapper[4717]: I1002 14:43:19.350746 4717 generic.go:334] "Generic (PLEG): container finished" podID="405aba30-0ff3-4fca-a5da-09c35263665d" containerID="f4210ad73a8a6d0d88db04fcb399f0863d41d8468e02867425cd0ab6020cf084" exitCode=0 Oct 02 14:43:19 crc kubenswrapper[4717]: I1002 14:43:19.350797 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerDied","Data":"f4210ad73a8a6d0d88db04fcb399f0863d41d8468e02867425cd0ab6020cf084"} Oct 02 14:43:19 crc kubenswrapper[4717]: I1002 14:43:19.351259 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a"} Oct 02 14:43:19 crc kubenswrapper[4717]: I1002 14:43:19.351283 4717 scope.go:117] "RemoveContainer" containerID="52384e0f02272cfcc1d37cbacff5ecff9bba1bac6264b24fc5eae60641b49d30" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.218646 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jw44b"] Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.223718 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.234256 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jw44b"] Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.340491 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-utilities\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.341017 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-catalog-content\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.341264 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p4xb\" (UniqueName: \"kubernetes.io/projected/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-kube-api-access-6p4xb\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.443085 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-utilities\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.443174 4717 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-catalog-content\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.443197 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p4xb\" (UniqueName: \"kubernetes.io/projected/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-kube-api-access-6p4xb\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.443761 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-utilities\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.443794 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-catalog-content\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.468183 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p4xb\" (UniqueName: \"kubernetes.io/projected/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-kube-api-access-6p4xb\") pod \"redhat-operators-jw44b\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.548481 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:44 crc kubenswrapper[4717]: I1002 14:44:44.975282 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jw44b"] Oct 02 14:44:45 crc kubenswrapper[4717]: I1002 14:44:45.178727 4717 generic.go:334] "Generic (PLEG): container finished" podID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerID="ab8767fa39561079f3ed509f0cb8968fc87896c3479cbaa8b3efabed14b7217b" exitCode=0 Oct 02 14:44:45 crc kubenswrapper[4717]: I1002 14:44:45.178890 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jw44b" event={"ID":"62f91a3a-f3fa-410d-9f2c-ca6816b52de4","Type":"ContainerDied","Data":"ab8767fa39561079f3ed509f0cb8968fc87896c3479cbaa8b3efabed14b7217b"} Oct 02 14:44:45 crc kubenswrapper[4717]: I1002 14:44:45.179153 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jw44b" event={"ID":"62f91a3a-f3fa-410d-9f2c-ca6816b52de4","Type":"ContainerStarted","Data":"b1acc8e5de8d40a769ba538df702c4bf0bc0ba4809580d2a4ccff5b2c507eaa4"} Oct 02 14:44:45 crc kubenswrapper[4717]: I1002 14:44:45.180608 4717 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 02 14:44:46 crc kubenswrapper[4717]: I1002 14:44:46.195465 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jw44b" event={"ID":"62f91a3a-f3fa-410d-9f2c-ca6816b52de4","Type":"ContainerStarted","Data":"7f33493617c65ee2449710547a116581b53074cfd92e2a219ed06244fe328db3"} Oct 02 14:44:47 crc kubenswrapper[4717]: I1002 14:44:47.206444 4717 generic.go:334] "Generic (PLEG): container finished" podID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerID="7f33493617c65ee2449710547a116581b53074cfd92e2a219ed06244fe328db3" exitCode=0 Oct 02 14:44:47 crc kubenswrapper[4717]: I1002 14:44:47.206517 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jw44b" event={"ID":"62f91a3a-f3fa-410d-9f2c-ca6816b52de4","Type":"ContainerDied","Data":"7f33493617c65ee2449710547a116581b53074cfd92e2a219ed06244fe328db3"} Oct 02 14:44:48 crc kubenswrapper[4717]: I1002 14:44:48.215742 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jw44b" event={"ID":"62f91a3a-f3fa-410d-9f2c-ca6816b52de4","Type":"ContainerStarted","Data":"faa4ee797877440e59d988ae0006cb39385192716df8e7fb898651e04d7dd2d7"} Oct 02 14:44:48 crc kubenswrapper[4717]: I1002 14:44:48.239504 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jw44b" podStartSLOduration=1.641344795 podStartE2EDuration="4.239472751s" podCreationTimestamp="2025-10-02 14:44:44 +0000 UTC" firstStartedPulling="2025-10-02 14:44:45.180390745 +0000 UTC m=+1436.032245191" lastFinishedPulling="2025-10-02 14:44:47.778518701 +0000 UTC m=+1438.630373147" observedRunningTime="2025-10-02 14:44:48.231949389 +0000 UTC m=+1439.083803835" watchObservedRunningTime="2025-10-02 14:44:48.239472751 +0000 UTC m=+1439.091327237" Oct 02 14:44:54 crc kubenswrapper[4717]: I1002 14:44:54.549499 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:54 crc kubenswrapper[4717]: I1002 14:44:54.550568 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:54 crc 
kubenswrapper[4717]: I1002 14:44:54.623101 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:55 crc kubenswrapper[4717]: I1002 14:44:55.342579 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:55 crc kubenswrapper[4717]: I1002 14:44:55.400736 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jw44b"] Oct 02 14:44:56 crc kubenswrapper[4717]: I1002 14:44:56.017856 4717 scope.go:117] "RemoveContainer" containerID="729ebb57bfea4200c89676478bd5eb660ffc69160fbe99a8d81e3c6e4c23487d" Oct 02 14:44:56 crc kubenswrapper[4717]: I1002 14:44:56.054791 4717 scope.go:117] "RemoveContainer" containerID="f0574f484e5443d13f88806c09fe4e00f2fd7cb8002a3e0915423008edd1a964" Oct 02 14:44:57 crc kubenswrapper[4717]: I1002 14:44:57.309140 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jw44b" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerName="registry-server" containerID="cri-o://faa4ee797877440e59d988ae0006cb39385192716df8e7fb898651e04d7dd2d7" gracePeriod=2 Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.329490 4717 generic.go:334] "Generic (PLEG): container finished" podID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerID="faa4ee797877440e59d988ae0006cb39385192716df8e7fb898651e04d7dd2d7" exitCode=0 Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.329551 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jw44b" event={"ID":"62f91a3a-f3fa-410d-9f2c-ca6816b52de4","Type":"ContainerDied","Data":"faa4ee797877440e59d988ae0006cb39385192716df8e7fb898651e04d7dd2d7"} Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.615368 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.720280 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-catalog-content\") pod \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.720333 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6p4xb\" (UniqueName: \"kubernetes.io/projected/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-kube-api-access-6p4xb\") pod \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.720422 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-utilities\") pod \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\" (UID: \"62f91a3a-f3fa-410d-9f2c-ca6816b52de4\") " Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.721761 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-utilities" (OuterVolumeSpecName: "utilities") pod "62f91a3a-f3fa-410d-9f2c-ca6816b52de4" (UID: "62f91a3a-f3fa-410d-9f2c-ca6816b52de4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.729191 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-kube-api-access-6p4xb" (OuterVolumeSpecName: "kube-api-access-6p4xb") pod "62f91a3a-f3fa-410d-9f2c-ca6816b52de4" (UID: "62f91a3a-f3fa-410d-9f2c-ca6816b52de4"). InnerVolumeSpecName "kube-api-access-6p4xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.796102 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62f91a3a-f3fa-410d-9f2c-ca6816b52de4" (UID: "62f91a3a-f3fa-410d-9f2c-ca6816b52de4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.822184 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.822456 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6p4xb\" (UniqueName: \"kubernetes.io/projected/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-kube-api-access-6p4xb\") on node \"crc\" DevicePath \"\"" Oct 02 14:44:59 crc kubenswrapper[4717]: I1002 14:44:59.822525 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f91a3a-f3fa-410d-9f2c-ca6816b52de4-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.148073 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2"] Oct 02 14:45:00 crc kubenswrapper[4717]: E1002 14:45:00.148835 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerName="extract-content" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.149013 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerName="extract-content" Oct 02 14:45:00 crc kubenswrapper[4717]: E1002 14:45:00.149164 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerName="extract-utilities" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.149277 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerName="extract-utilities" Oct 02 14:45:00 crc kubenswrapper[4717]: E1002 14:45:00.149531 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerName="registry-server" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.149632 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerName="registry-server" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.150032 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" containerName="registry-server" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.150995 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.154042 4717 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.154529 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.160071 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2"] Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.228026 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9b1c567d-aae5-4323-a295-ebfeb1943e0f-secret-volume\") pod \"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.228428 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcrt7\" (UniqueName: \"kubernetes.io/projected/9b1c567d-aae5-4323-a295-ebfeb1943e0f-kube-api-access-lcrt7\") pod \"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.228626 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9b1c567d-aae5-4323-a295-ebfeb1943e0f-config-volume\") pod \"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.330254 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcrt7\" (UniqueName: \"kubernetes.io/projected/9b1c567d-aae5-4323-a295-ebfeb1943e0f-kube-api-access-lcrt7\") pod \"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.333911 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9b1c567d-aae5-4323-a295-ebfeb1943e0f-config-volume\") pod \"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.331090 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9b1c567d-aae5-4323-a295-ebfeb1943e0f-config-volume\") pod \"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.334704 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9b1c567d-aae5-4323-a295-ebfeb1943e0f-secret-volume\") pod 
\"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.340978 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9b1c567d-aae5-4323-a295-ebfeb1943e0f-secret-volume\") pod \"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.341633 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jw44b" event={"ID":"62f91a3a-f3fa-410d-9f2c-ca6816b52de4","Type":"ContainerDied","Data":"b1acc8e5de8d40a769ba538df702c4bf0bc0ba4809580d2a4ccff5b2c507eaa4"} Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.341777 4717 scope.go:117] "RemoveContainer" containerID="faa4ee797877440e59d988ae0006cb39385192716df8e7fb898651e04d7dd2d7" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.342065 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jw44b" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.348003 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcrt7\" (UniqueName: \"kubernetes.io/projected/9b1c567d-aae5-4323-a295-ebfeb1943e0f-kube-api-access-lcrt7\") pod \"collect-profiles-29323605-gjtx2\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.415600 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jw44b"] Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.426683 4717 scope.go:117] "RemoveContainer" containerID="7f33493617c65ee2449710547a116581b53074cfd92e2a219ed06244fe328db3" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.427542 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jw44b"] Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.447968 4717 scope.go:117] "RemoveContainer" containerID="ab8767fa39561079f3ed509f0cb8968fc87896c3479cbaa8b3efabed14b7217b" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.475829 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.864031 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62f91a3a-f3fa-410d-9f2c-ca6816b52de4" path="/var/lib/kubelet/pods/62f91a3a-f3fa-410d-9f2c-ca6816b52de4/volumes" Oct 02 14:45:00 crc kubenswrapper[4717]: I1002 14:45:00.899916 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2"] Oct 02 14:45:00 crc kubenswrapper[4717]: W1002 14:45:00.909235 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b1c567d_aae5_4323_a295_ebfeb1943e0f.slice/crio-1b0829fcdf98915b74150a2b83cf2f011417bd5d91a4e5e53139ca48c19deb0d WatchSource:0}: Error finding container 1b0829fcdf98915b74150a2b83cf2f011417bd5d91a4e5e53139ca48c19deb0d: Status 404 returned error can't find the container with id 1b0829fcdf98915b74150a2b83cf2f011417bd5d91a4e5e53139ca48c19deb0d Oct 02 14:45:01 crc kubenswrapper[4717]: I1002 14:45:01.351998 4717 generic.go:334] "Generic (PLEG): container finished" podID="9b1c567d-aae5-4323-a295-ebfeb1943e0f" containerID="6c024c6b0fa3c3118a7e972b0b0ed80dc212aa439235bdc58d8cdf4955acc10a" exitCode=0 Oct 02 14:45:01 crc kubenswrapper[4717]: I1002 14:45:01.352074 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" event={"ID":"9b1c567d-aae5-4323-a295-ebfeb1943e0f","Type":"ContainerDied","Data":"6c024c6b0fa3c3118a7e972b0b0ed80dc212aa439235bdc58d8cdf4955acc10a"} Oct 02 14:45:01 crc kubenswrapper[4717]: I1002 14:45:01.352564 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" event={"ID":"9b1c567d-aae5-4323-a295-ebfeb1943e0f","Type":"ContainerStarted","Data":"1b0829fcdf98915b74150a2b83cf2f011417bd5d91a4e5e53139ca48c19deb0d"} Oct 02 14:45:01 crc kubenswrapper[4717]: E1002 14:45:01.399328 4717 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b1c567d_aae5_4323_a295_ebfeb1943e0f.slice/crio-6c024c6b0fa3c3118a7e972b0b0ed80dc212aa439235bdc58d8cdf4955acc10a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b1c567d_aae5_4323_a295_ebfeb1943e0f.slice/crio-conmon-6c024c6b0fa3c3118a7e972b0b0ed80dc212aa439235bdc58d8cdf4955acc10a.scope\": RecentStats: unable to find data in memory cache]" Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.616436 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.678100 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9b1c567d-aae5-4323-a295-ebfeb1943e0f-secret-volume\") pod \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.678172 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9b1c567d-aae5-4323-a295-ebfeb1943e0f-config-volume\") pod \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.678296 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcrt7\" (UniqueName: \"kubernetes.io/projected/9b1c567d-aae5-4323-a295-ebfeb1943e0f-kube-api-access-lcrt7\") pod \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\" (UID: \"9b1c567d-aae5-4323-a295-ebfeb1943e0f\") " Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.679271 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b1c567d-aae5-4323-a295-ebfeb1943e0f-config-volume" (OuterVolumeSpecName: "config-volume") pod "9b1c567d-aae5-4323-a295-ebfeb1943e0f" (UID: "9b1c567d-aae5-4323-a295-ebfeb1943e0f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.683889 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b1c567d-aae5-4323-a295-ebfeb1943e0f-kube-api-access-lcrt7" (OuterVolumeSpecName: "kube-api-access-lcrt7") pod "9b1c567d-aae5-4323-a295-ebfeb1943e0f" (UID: "9b1c567d-aae5-4323-a295-ebfeb1943e0f"). InnerVolumeSpecName "kube-api-access-lcrt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.685068 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b1c567d-aae5-4323-a295-ebfeb1943e0f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9b1c567d-aae5-4323-a295-ebfeb1943e0f" (UID: "9b1c567d-aae5-4323-a295-ebfeb1943e0f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.780369 4717 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9b1c567d-aae5-4323-a295-ebfeb1943e0f-config-volume\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.780410 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcrt7\" (UniqueName: \"kubernetes.io/projected/9b1c567d-aae5-4323-a295-ebfeb1943e0f-kube-api-access-lcrt7\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:02 crc kubenswrapper[4717]: I1002 14:45:02.780429 4717 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9b1c567d-aae5-4323-a295-ebfeb1943e0f-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:03 crc kubenswrapper[4717]: I1002 14:45:03.370532 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" event={"ID":"9b1c567d-aae5-4323-a295-ebfeb1943e0f","Type":"ContainerDied","Data":"1b0829fcdf98915b74150a2b83cf2f011417bd5d91a4e5e53139ca48c19deb0d"} Oct 02 14:45:03 crc kubenswrapper[4717]: I1002 14:45:03.370589 4717 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b0829fcdf98915b74150a2b83cf2f011417bd5d91a4e5e53139ca48c19deb0d" Oct 02 14:45:03 crc kubenswrapper[4717]: I1002 14:45:03.370634 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29323605-gjtx2" Oct 02 14:45:18 crc kubenswrapper[4717]: I1002 14:45:18.620796 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:45:18 crc kubenswrapper[4717]: I1002 14:45:18.621677 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:45:22 crc kubenswrapper[4717]: I1002 14:45:22.827818 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nhpnv"] Oct 02 14:45:22 crc kubenswrapper[4717]: E1002 14:45:22.829282 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b1c567d-aae5-4323-a295-ebfeb1943e0f" containerName="collect-profiles" Oct 02 14:45:22 crc kubenswrapper[4717]: I1002 14:45:22.829306 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b1c567d-aae5-4323-a295-ebfeb1943e0f" containerName="collect-profiles" Oct 02 14:45:22 crc kubenswrapper[4717]: I1002 14:45:22.829627 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b1c567d-aae5-4323-a295-ebfeb1943e0f" containerName="collect-profiles" Oct 02 14:45:22 crc kubenswrapper[4717]: I1002 14:45:22.831508 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:22 crc kubenswrapper[4717]: I1002 14:45:22.849392 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nhpnv"] Oct 02 14:45:22 crc kubenswrapper[4717]: I1002 14:45:22.950760 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-catalog-content\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:22 crc kubenswrapper[4717]: I1002 14:45:22.950897 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89xql\" (UniqueName: \"kubernetes.io/projected/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-kube-api-access-89xql\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:22 crc kubenswrapper[4717]: I1002 14:45:22.951044 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-utilities\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:23 crc kubenswrapper[4717]: I1002 14:45:23.053343 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-catalog-content\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:23 crc kubenswrapper[4717]: I1002 14:45:23.053429 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89xql\" (UniqueName: \"kubernetes.io/projected/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-kube-api-access-89xql\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:23 crc kubenswrapper[4717]: I1002 14:45:23.053491 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-utilities\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:23 crc kubenswrapper[4717]: I1002 14:45:23.054017 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-catalog-content\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:23 crc kubenswrapper[4717]: I1002 14:45:23.054154 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-utilities\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:23 crc kubenswrapper[4717]: I1002 14:45:23.087744 4717 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-89xql\" (UniqueName: \"kubernetes.io/projected/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-kube-api-access-89xql\") pod \"certified-operators-nhpnv\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:23 crc kubenswrapper[4717]: I1002 14:45:23.168723 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:23 crc kubenswrapper[4717]: I1002 14:45:23.677213 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nhpnv"] Oct 02 14:45:23 crc kubenswrapper[4717]: W1002 14:45:23.686808 4717 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ed87f34_8e2f_42cc_a5a2_d68ef98622bf.slice/crio-a6f2f764690d27aaf0516e73f079502c4760612862e6ad4a11e9884b7eaf0c7f WatchSource:0}: Error finding container a6f2f764690d27aaf0516e73f079502c4760612862e6ad4a11e9884b7eaf0c7f: Status 404 returned error can't find the container with id a6f2f764690d27aaf0516e73f079502c4760612862e6ad4a11e9884b7eaf0c7f Oct 02 14:45:24 crc kubenswrapper[4717]: I1002 14:45:24.588561 4717 generic.go:334] "Generic (PLEG): container finished" podID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerID="a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca" exitCode=0 Oct 02 14:45:24 crc kubenswrapper[4717]: I1002 14:45:24.588612 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhpnv" event={"ID":"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf","Type":"ContainerDied","Data":"a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca"} Oct 02 14:45:24 crc kubenswrapper[4717]: I1002 14:45:24.588648 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhpnv" event={"ID":"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf","Type":"ContainerStarted","Data":"a6f2f764690d27aaf0516e73f079502c4760612862e6ad4a11e9884b7eaf0c7f"} Oct 02 14:45:25 crc kubenswrapper[4717]: I1002 14:45:25.600274 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhpnv" event={"ID":"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf","Type":"ContainerStarted","Data":"03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939"} Oct 02 14:45:26 crc kubenswrapper[4717]: I1002 14:45:26.614727 4717 generic.go:334] "Generic (PLEG): container finished" podID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerID="03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939" exitCode=0 Oct 02 14:45:26 crc kubenswrapper[4717]: I1002 14:45:26.614815 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhpnv" event={"ID":"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf","Type":"ContainerDied","Data":"03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939"} Oct 02 14:45:27 crc kubenswrapper[4717]: I1002 14:45:27.631636 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhpnv" event={"ID":"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf","Type":"ContainerStarted","Data":"108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a"} Oct 02 14:45:27 crc kubenswrapper[4717]: I1002 14:45:27.669172 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nhpnv" 
podStartSLOduration=3.181529812 podStartE2EDuration="5.669139102s" podCreationTimestamp="2025-10-02 14:45:22 +0000 UTC" firstStartedPulling="2025-10-02 14:45:24.591257452 +0000 UTC m=+1475.443111928" lastFinishedPulling="2025-10-02 14:45:27.078866772 +0000 UTC m=+1477.930721218" observedRunningTime="2025-10-02 14:45:27.659102523 +0000 UTC m=+1478.510956979" watchObservedRunningTime="2025-10-02 14:45:27.669139102 +0000 UTC m=+1478.520993588" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.387553 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j2m6x"] Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.389466 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.406121 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2m6x"] Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.494571 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-catalog-content\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.494700 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlb79\" (UniqueName: \"kubernetes.io/projected/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-kube-api-access-nlb79\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.494775 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-utilities\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.596839 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-catalog-content\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.596965 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlb79\" (UniqueName: \"kubernetes.io/projected/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-kube-api-access-nlb79\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.597040 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-utilities\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.597729 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-utilities\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.598356 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-catalog-content\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.620710 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlb79\" (UniqueName: \"kubernetes.io/projected/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-kube-api-access-nlb79\") pod \"redhat-marketplace-j2m6x\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.711693 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:29 crc kubenswrapper[4717]: I1002 14:45:29.965734 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2m6x"] Oct 02 14:45:30 crc kubenswrapper[4717]: I1002 14:45:30.665822 4717 generic.go:334] "Generic (PLEG): container finished" podID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerID="dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9" exitCode=0 Oct 02 14:45:30 crc kubenswrapper[4717]: I1002 14:45:30.665964 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2m6x" event={"ID":"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f","Type":"ContainerDied","Data":"dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9"} Oct 02 14:45:30 crc kubenswrapper[4717]: I1002 14:45:30.666335 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2m6x" event={"ID":"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f","Type":"ContainerStarted","Data":"332298a26ae3dfb687ecadd66248bacab716dcef5b18ddfee7587d5385b37147"} Oct 02 14:45:32 crc kubenswrapper[4717]: I1002 14:45:32.718613 4717 generic.go:334] "Generic (PLEG): container finished" podID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerID="a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc" exitCode=0 Oct 02 14:45:32 crc kubenswrapper[4717]: I1002 14:45:32.718748 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2m6x" event={"ID":"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f","Type":"ContainerDied","Data":"a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc"} Oct 02 14:45:33 crc kubenswrapper[4717]: I1002 14:45:33.169478 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:33 crc kubenswrapper[4717]: I1002 14:45:33.170144 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:33 crc kubenswrapper[4717]: I1002 14:45:33.253688 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:33 crc kubenswrapper[4717]: I1002 14:45:33.738997 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-j2m6x" event={"ID":"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f","Type":"ContainerStarted","Data":"96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e"} Oct 02 14:45:33 crc kubenswrapper[4717]: I1002 14:45:33.777076 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j2m6x" podStartSLOduration=2.283727757 podStartE2EDuration="4.77704118s" podCreationTimestamp="2025-10-02 14:45:29 +0000 UTC" firstStartedPulling="2025-10-02 14:45:30.669562244 +0000 UTC m=+1481.521416720" lastFinishedPulling="2025-10-02 14:45:33.162875657 +0000 UTC m=+1484.014730143" observedRunningTime="2025-10-02 14:45:33.766989789 +0000 UTC m=+1484.618844325" watchObservedRunningTime="2025-10-02 14:45:33.77704118 +0000 UTC m=+1484.628895686" Oct 02 14:45:33 crc kubenswrapper[4717]: I1002 14:45:33.814663 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:34 crc kubenswrapper[4717]: I1002 14:45:34.985289 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nhpnv"] Oct 02 14:45:35 crc kubenswrapper[4717]: I1002 14:45:35.759258 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nhpnv" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerName="registry-server" containerID="cri-o://108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a" gracePeriod=2 Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.237002 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.321654 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89xql\" (UniqueName: \"kubernetes.io/projected/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-kube-api-access-89xql\") pod \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.321721 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-catalog-content\") pod \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.321737 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-utilities\") pod \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\" (UID: \"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf\") " Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.322969 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-utilities" (OuterVolumeSpecName: "utilities") pod "4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" (UID: "4ed87f34-8e2f-42cc-a5a2-d68ef98622bf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.332252 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-kube-api-access-89xql" (OuterVolumeSpecName: "kube-api-access-89xql") pod "4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" (UID: "4ed87f34-8e2f-42cc-a5a2-d68ef98622bf"). InnerVolumeSpecName "kube-api-access-89xql". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.395576 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" (UID: "4ed87f34-8e2f-42cc-a5a2-d68ef98622bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.423707 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89xql\" (UniqueName: \"kubernetes.io/projected/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-kube-api-access-89xql\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.423741 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.423750 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.772840 4717 generic.go:334] "Generic (PLEG): container finished" podID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerID="108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a" exitCode=0 Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.772929 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhpnv" event={"ID":"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf","Type":"ContainerDied","Data":"108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a"} Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.773098 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nhpnv" event={"ID":"4ed87f34-8e2f-42cc-a5a2-d68ef98622bf","Type":"ContainerDied","Data":"a6f2f764690d27aaf0516e73f079502c4760612862e6ad4a11e9884b7eaf0c7f"} Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.773143 4717 scope.go:117] "RemoveContainer" containerID="108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.773468 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nhpnv" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.809048 4717 scope.go:117] "RemoveContainer" containerID="03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.854384 4717 scope.go:117] "RemoveContainer" containerID="a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.854868 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nhpnv"] Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.863493 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nhpnv"] Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.897406 4717 scope.go:117] "RemoveContainer" containerID="108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a" Oct 02 14:45:36 crc kubenswrapper[4717]: E1002 14:45:36.898083 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a\": container with ID starting with 108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a not found: ID does not exist" containerID="108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.898162 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a"} err="failed to get container status \"108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a\": rpc error: code = NotFound desc = could not find container \"108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a\": container with ID starting with 108abcff930678f82102a1c8c7d6dc1851470fd2647cc05edd60992d35febd2a not found: ID does not exist" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.898211 4717 scope.go:117] "RemoveContainer" containerID="03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939" Oct 02 14:45:36 crc kubenswrapper[4717]: E1002 14:45:36.898764 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939\": container with ID starting with 03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939 not found: ID does not exist" containerID="03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.898858 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939"} err="failed to get container status \"03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939\": rpc error: code = NotFound desc = could not find container \"03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939\": container with ID starting with 03966f4d13b562c463ede8cddd8a2b05c2c16a9b5500a587eb89b7e783a7b939 not found: ID does not exist" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.898888 4717 scope.go:117] "RemoveContainer" containerID="a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca" Oct 02 14:45:36 crc kubenswrapper[4717]: E1002 14:45:36.899318 4717 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca\": container with ID starting with a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca not found: ID does not exist" containerID="a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca" Oct 02 14:45:36 crc kubenswrapper[4717]: I1002 14:45:36.899363 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca"} err="failed to get container status \"a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca\": rpc error: code = NotFound desc = could not find container \"a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca\": container with ID starting with a8d92394bc48d479c995c446d521133b3349ee923c9d7a0689caf967a80353ca not found: ID does not exist" Oct 02 14:45:38 crc kubenswrapper[4717]: I1002 14:45:38.854836 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" path="/var/lib/kubelet/pods/4ed87f34-8e2f-42cc-a5a2-d68ef98622bf/volumes" Oct 02 14:45:39 crc kubenswrapper[4717]: I1002 14:45:39.713728 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:39 crc kubenswrapper[4717]: I1002 14:45:39.713836 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:39 crc kubenswrapper[4717]: I1002 14:45:39.779752 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:39 crc kubenswrapper[4717]: I1002 14:45:39.893664 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:40 crc kubenswrapper[4717]: I1002 14:45:40.584094 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2m6x"] Oct 02 14:45:41 crc kubenswrapper[4717]: I1002 14:45:41.831103 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j2m6x" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerName="registry-server" containerID="cri-o://96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e" gracePeriod=2 Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.310782 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.447357 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-utilities\") pod \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.447419 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlb79\" (UniqueName: \"kubernetes.io/projected/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-kube-api-access-nlb79\") pod \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.447462 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-catalog-content\") pod \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\" (UID: \"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f\") " Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.449346 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-utilities" (OuterVolumeSpecName: "utilities") pod "5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" (UID: "5cccb516-eda9-4fd7-a5dd-a85e7dd5529f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.454780 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-kube-api-access-nlb79" (OuterVolumeSpecName: "kube-api-access-nlb79") pod "5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" (UID: "5cccb516-eda9-4fd7-a5dd-a85e7dd5529f"). InnerVolumeSpecName "kube-api-access-nlb79". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.477277 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" (UID: "5cccb516-eda9-4fd7-a5dd-a85e7dd5529f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.550376 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.550428 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlb79\" (UniqueName: \"kubernetes.io/projected/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-kube-api-access-nlb79\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.550458 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.846070 4717 generic.go:334] "Generic (PLEG): container finished" podID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerID="96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e" exitCode=0 Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.846231 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2m6x" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.856333 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2m6x" event={"ID":"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f","Type":"ContainerDied","Data":"96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e"} Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.856417 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2m6x" event={"ID":"5cccb516-eda9-4fd7-a5dd-a85e7dd5529f","Type":"ContainerDied","Data":"332298a26ae3dfb687ecadd66248bacab716dcef5b18ddfee7587d5385b37147"} Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.856462 4717 scope.go:117] "RemoveContainer" containerID="96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.889663 4717 scope.go:117] "RemoveContainer" containerID="a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.910792 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2m6x"] Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.916690 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2m6x"] Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.936627 4717 scope.go:117] "RemoveContainer" containerID="dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.975828 4717 scope.go:117] "RemoveContainer" containerID="96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e" Oct 02 14:45:42 crc kubenswrapper[4717]: E1002 14:45:42.976968 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e\": container with ID starting with 96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e not found: ID does not exist" containerID="96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.977017 4717 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e"} err="failed to get container status \"96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e\": rpc error: code = NotFound desc = could not find container \"96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e\": container with ID starting with 96949c0c01a964a0e83d8d09754a20a63d7a1aa16d67eacc69bdf515f15b9c8e not found: ID does not exist" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.977052 4717 scope.go:117] "RemoveContainer" containerID="a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc" Oct 02 14:45:42 crc kubenswrapper[4717]: E1002 14:45:42.978317 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc\": container with ID starting with a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc not found: ID does not exist" containerID="a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.978341 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc"} err="failed to get container status \"a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc\": rpc error: code = NotFound desc = could not find container \"a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc\": container with ID starting with a30e978b099a2310390121f1e4622fe185442d81c8972181b0b847bcd47904fc not found: ID does not exist" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.978355 4717 scope.go:117] "RemoveContainer" containerID="dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9" Oct 02 14:45:42 crc kubenswrapper[4717]: E1002 14:45:42.980272 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9\": container with ID starting with dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9 not found: ID does not exist" containerID="dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9" Oct 02 14:45:42 crc kubenswrapper[4717]: I1002 14:45:42.980330 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9"} err="failed to get container status \"dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9\": rpc error: code = NotFound desc = could not find container \"dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9\": container with ID starting with dc63b5342a2b2029baf29b4bb1d5aa317a08f171f0e76bfc9da9ffb352ed9bb9 not found: ID does not exist" Oct 02 14:45:44 crc kubenswrapper[4717]: I1002 14:45:44.856352 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" path="/var/lib/kubelet/pods/5cccb516-eda9-4fd7-a5dd-a85e7dd5529f/volumes" Oct 02 14:45:48 crc kubenswrapper[4717]: I1002 14:45:48.620018 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:45:48 crc kubenswrapper[4717]: I1002 14:45:48.620905 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:45:56 crc kubenswrapper[4717]: I1002 14:45:56.128891 4717 scope.go:117] "RemoveContainer" containerID="cd64fbe8e2891ccf40265ed0a3175e6f97ce04529e678e0961d993a11d7fc1ed" Oct 02 14:46:18 crc kubenswrapper[4717]: I1002 14:46:18.620065 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:46:18 crc kubenswrapper[4717]: I1002 14:46:18.620702 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:46:18 crc kubenswrapper[4717]: I1002 14:46:18.620744 4717 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" Oct 02 14:46:18 crc kubenswrapper[4717]: I1002 14:46:18.621457 4717 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a"} pod="openshift-machine-config-operator/machine-config-daemon-sk55f" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 02 14:46:18 crc kubenswrapper[4717]: I1002 14:46:18.621508 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" containerID="cri-o://3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" gracePeriod=600 Oct 02 14:46:18 crc kubenswrapper[4717]: E1002 14:46:18.835446 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:46:19 crc kubenswrapper[4717]: I1002 14:46:19.155245 4717 generic.go:334] "Generic (PLEG): container finished" podID="405aba30-0ff3-4fca-a5da-09c35263665d" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" exitCode=0 Oct 02 14:46:19 crc kubenswrapper[4717]: I1002 14:46:19.155301 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerDied","Data":"3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a"} Oct 02 14:46:19 crc kubenswrapper[4717]: I1002 
14:46:19.155355 4717 scope.go:117] "RemoveContainer" containerID="f4210ad73a8a6d0d88db04fcb399f0863d41d8468e02867425cd0ab6020cf084" Oct 02 14:46:19 crc kubenswrapper[4717]: I1002 14:46:19.156229 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:46:19 crc kubenswrapper[4717]: E1002 14:46:19.158392 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:46:20 crc kubenswrapper[4717]: I1002 14:46:20.045422 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-db-create-9xhfl"] Oct 02 14:46:20 crc kubenswrapper[4717]: I1002 14:46:20.050303 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-db-create-9xhfl"] Oct 02 14:46:20 crc kubenswrapper[4717]: I1002 14:46:20.847096 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ef07b02-7538-48f8-9e40-7117115e2917" path="/var/lib/kubelet/pods/1ef07b02-7538-48f8-9e40-7117115e2917/volumes" Oct 02 14:46:31 crc kubenswrapper[4717]: I1002 14:46:31.838626 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:46:31 crc kubenswrapper[4717]: E1002 14:46:31.839409 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:46:42 crc kubenswrapper[4717]: I1002 14:46:42.039806 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-a1a1-account-create-wq2qg"] Oct 02 14:46:42 crc kubenswrapper[4717]: I1002 14:46:42.046411 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-a1a1-account-create-wq2qg"] Oct 02 14:46:42 crc kubenswrapper[4717]: I1002 14:46:42.849594 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5005cfbf-4a42-4330-a56d-6ff510cf500b" path="/var/lib/kubelet/pods/5005cfbf-4a42-4330-a56d-6ff510cf500b/volumes" Oct 02 14:46:43 crc kubenswrapper[4717]: I1002 14:46:43.839362 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:46:43 crc kubenswrapper[4717]: E1002 14:46:43.840898 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:46:56 crc kubenswrapper[4717]: I1002 14:46:56.253669 4717 scope.go:117] "RemoveContainer" containerID="24706576e27032048f0b01fca69e2fd6925d338b25eea4081688a589797a472a" Oct 02 14:46:56 crc 
kubenswrapper[4717]: I1002 14:46:56.288713 4717 scope.go:117] "RemoveContainer" containerID="a31b776ed84372160a54d8f760db685debb304ac895385abd7f080c2a9ab3f7a" Oct 02 14:46:56 crc kubenswrapper[4717]: I1002 14:46:56.337480 4717 scope.go:117] "RemoveContainer" containerID="fc9ef959e991e0f1f79f920eda2479c41994f466bc67c75884ffab94fc00aa77" Oct 02 14:46:56 crc kubenswrapper[4717]: I1002 14:46:56.361421 4717 scope.go:117] "RemoveContainer" containerID="cb990ee7d49d70135ea27662a6e39aba760d5f6760d12ad8b001aa5149b16035" Oct 02 14:46:56 crc kubenswrapper[4717]: I1002 14:46:56.390238 4717 scope.go:117] "RemoveContainer" containerID="dabe52373d5434486d68aca21f722be9dfeb38ee7efb8fd4f9b6b3fc2b7a523d" Oct 02 14:46:56 crc kubenswrapper[4717]: I1002 14:46:56.435469 4717 scope.go:117] "RemoveContainer" containerID="695395e57f33fd1a5ea70ffa8f8dbdae8444c5117aa81724194e1fcc4fd0cdf0" Oct 02 14:46:56 crc kubenswrapper[4717]: I1002 14:46:56.457982 4717 scope.go:117] "RemoveContainer" containerID="72d3d1944d761478c90857d7e01384d6d42869bcc1349cb788d5555467f24bbf" Oct 02 14:46:57 crc kubenswrapper[4717]: I1002 14:46:57.839486 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:46:57 crc kubenswrapper[4717]: E1002 14:46:57.840431 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:47:01 crc kubenswrapper[4717]: I1002 14:47:01.035404 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-6glqr"] Oct 02 14:47:01 crc kubenswrapper[4717]: I1002 14:47:01.042593 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-6glqr"] Oct 02 14:47:02 crc kubenswrapper[4717]: I1002 14:47:02.852014 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b86d48c9-74ed-4705-8990-075e751746a6" path="/var/lib/kubelet/pods/b86d48c9-74ed-4705-8990-075e751746a6/volumes" Oct 02 14:47:07 crc kubenswrapper[4717]: I1002 14:47:07.040413 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-t4m47"] Oct 02 14:47:07 crc kubenswrapper[4717]: I1002 14:47:07.051692 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-t4m47"] Oct 02 14:47:08 crc kubenswrapper[4717]: I1002 14:47:08.848698 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e" path="/var/lib/kubelet/pods/3ee2cd62-9303-4d6d-9bb6-e66c7e83b29e/volumes" Oct 02 14:47:11 crc kubenswrapper[4717]: I1002 14:47:11.839469 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:47:11 crc kubenswrapper[4717]: E1002 14:47:11.840569 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" 
podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:47:25 crc kubenswrapper[4717]: I1002 14:47:25.838226 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:47:25 crc kubenswrapper[4717]: E1002 14:47:25.839046 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.334767 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:47:36 crc kubenswrapper[4717]: E1002 14:47:36.335774 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerName="extract-utilities" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.335787 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerName="extract-utilities" Oct 02 14:47:36 crc kubenswrapper[4717]: E1002 14:47:36.335810 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerName="registry-server" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.335816 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerName="registry-server" Oct 02 14:47:36 crc kubenswrapper[4717]: E1002 14:47:36.335830 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerName="extract-content" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.335836 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerName="extract-content" Oct 02 14:47:36 crc kubenswrapper[4717]: E1002 14:47:36.335844 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerName="registry-server" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.335850 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerName="registry-server" Oct 02 14:47:36 crc kubenswrapper[4717]: E1002 14:47:36.335861 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerName="extract-content" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.335868 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerName="extract-content" Oct 02 14:47:36 crc kubenswrapper[4717]: E1002 14:47:36.335878 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerName="extract-utilities" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.335883 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerName="extract-utilities" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.336043 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ed87f34-8e2f-42cc-a5a2-d68ef98622bf" containerName="registry-server" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.336059 4717 
memory_manager.go:354] "RemoveStaleState removing state" podUID="5cccb516-eda9-4fd7-a5dd-a85e7dd5529f" containerName="registry-server" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.336512 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.340100 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.340099 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"default-dockercfg-dnbdj" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.340765 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts-9db6gc427h" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.350091 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.353082 4717 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"openstack-config-secret" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.508972 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-config-secret\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.509342 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-config\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.509445 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcklq\" (UniqueName: \"kubernetes.io/projected/10372fe8-c834-4699-825d-4fef6b48cfc1-kube-api-access-dcklq\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.509774 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-scripts\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.611501 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-scripts\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.611561 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-config-secret\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" 
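[Editor's aside, not part of the captured journal.] The "SyncLoop (PLEG): event for pod" entries scattered through this log (for example the ContainerStarted/ContainerDied records for openshift-marketplace/certified-operators-nhpnv and redhat-marketplace-j2m6x above) all follow the same klog key/value format: pod="<namespace>/<name>" event={"ID":"<pod UID>","Type":"<event type>","Data":"<container or sandbox ID>"}. A minimal sketch for pulling those events for one pod out of a journal like this one is below; it assumes one journal entry per line (as in the original kubelet.log, before archive line-wrapping), and the file name and pod name in the usage comment are illustrative, not values taken from this capture.

#!/usr/bin/env python3
# Sketch: extract kubelet PLEG container events for a single pod from a journal dump.
import re
import sys

# Matches the klog fields seen in entries such as:
#   "SyncLoop (PLEG): event for pod" pod="ns/name" event={"ID":"...","Type":"ContainerDied","Data":"..."}
EVENT_RE = re.compile(
    r'"SyncLoop \(PLEG\): event for pod" pod="(?P<pod>[^"]+)" '
    r'event=\{"ID":"(?P<uid>[^"]+)","Type":"(?P<type>[^"]+)","Data":"(?P<data>[^"]+)"\}'
)

def pleg_events(lines, pod):
    """Yield (pod UID, event type, container/sandbox ID) for entries about the given pod."""
    for line in lines:
        m = EVENT_RE.search(line)
        if m and m.group("pod") == pod:
            yield m.group("uid"), m.group("type"), m.group("data")

if __name__ == "__main__":
    # Hypothetical usage:
    #   python3 pleg_events.py kubelet.log openshift-marketplace/certified-operators-nhpnv
    path, pod = sys.argv[1], sys.argv[2]
    with open(path, encoding="utf-8", errors="replace") as fh:
        for uid, etype, data in pleg_events(fh, pod):
            print(f"{etype}\t{uid}\t{data}")

[End of aside; the journal continues below.]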
Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.611631 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-config\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.611704 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcklq\" (UniqueName: \"kubernetes.io/projected/10372fe8-c834-4699-825d-4fef6b48cfc1-kube-api-access-dcklq\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.612829 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-scripts\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.613346 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-config\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.621695 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10372fe8-c834-4699-825d-4fef6b48cfc1-openstack-config-secret\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.629587 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcklq\" (UniqueName: \"kubernetes.io/projected/10372fe8-c834-4699-825d-4fef6b48cfc1-kube-api-access-dcklq\") pod \"openstackclient\" (UID: \"10372fe8-c834-4699-825d-4fef6b48cfc1\") " pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:36 crc kubenswrapper[4717]: I1002 14:47:36.654580 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 02 14:47:37 crc kubenswrapper[4717]: I1002 14:47:37.108098 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 02 14:47:37 crc kubenswrapper[4717]: I1002 14:47:37.890983 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"10372fe8-c834-4699-825d-4fef6b48cfc1","Type":"ContainerStarted","Data":"8feccd9d3f8f67369167e05b47b0bd298efed534e0cbe9a02694e5a211361d96"} Oct 02 14:47:37 crc kubenswrapper[4717]: I1002 14:47:37.891442 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"10372fe8-c834-4699-825d-4fef6b48cfc1","Type":"ContainerStarted","Data":"1709a2afa976e72d64ddbabb852758fe9a0ae37a6a6a583c84e5f2edee25462d"} Oct 02 14:47:37 crc kubenswrapper[4717]: I1002 14:47:37.916262 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstackclient" podStartSLOduration=1.916239263 podStartE2EDuration="1.916239263s" podCreationTimestamp="2025-10-02 14:47:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-02 14:47:37.910999023 +0000 UTC m=+1608.762853459" watchObservedRunningTime="2025-10-02 14:47:37.916239263 +0000 UTC m=+1608.768093719" Oct 02 14:47:40 crc kubenswrapper[4717]: I1002 14:47:40.843493 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:47:40 crc kubenswrapper[4717]: E1002 14:47:40.844258 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:47:53 crc kubenswrapper[4717]: I1002 14:47:53.839246 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:47:53 crc kubenswrapper[4717]: E1002 14:47:53.840242 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:47:56 crc kubenswrapper[4717]: I1002 14:47:56.599159 4717 scope.go:117] "RemoveContainer" containerID="a2accff64639b592679f5a28c4d69a7ec446ca652b42cca2a0e173870fdf5189" Oct 02 14:47:56 crc kubenswrapper[4717]: I1002 14:47:56.654304 4717 scope.go:117] "RemoveContainer" containerID="7af081b13d8195b13ebfc4c4971e30bc8b4421f86c5883b62bc0edd1c60d9348" Oct 02 14:48:04 crc kubenswrapper[4717]: I1002 14:48:04.838723 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:48:04 crc kubenswrapper[4717]: E1002 14:48:04.839464 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:48:19 crc kubenswrapper[4717]: I1002 14:48:19.839685 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:48:19 crc kubenswrapper[4717]: E1002 14:48:19.840463 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.380925 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pvzb7"] Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.383095 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.400134 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvzb7"] Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.529373 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8zhc\" (UniqueName: \"kubernetes.io/projected/ec48d790-dc88-4878-8f39-83f0cb94ab1d-kube-api-access-m8zhc\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.529597 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-utilities\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.529871 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-catalog-content\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.631498 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-utilities\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.631619 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-catalog-content\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.631657 
4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8zhc\" (UniqueName: \"kubernetes.io/projected/ec48d790-dc88-4878-8f39-83f0cb94ab1d-kube-api-access-m8zhc\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.632097 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-utilities\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.632490 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-catalog-content\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.656997 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8zhc\" (UniqueName: \"kubernetes.io/projected/ec48d790-dc88-4878-8f39-83f0cb94ab1d-kube-api-access-m8zhc\") pod \"community-operators-pvzb7\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:23 crc kubenswrapper[4717]: I1002 14:48:23.704799 4717 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:24 crc kubenswrapper[4717]: I1002 14:48:24.212853 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvzb7"] Oct 02 14:48:24 crc kubenswrapper[4717]: I1002 14:48:24.329872 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvzb7" event={"ID":"ec48d790-dc88-4878-8f39-83f0cb94ab1d","Type":"ContainerStarted","Data":"5e809c45430be63593b54eb750333de2859c8e6616c55847a9fb2460f569045b"} Oct 02 14:48:25 crc kubenswrapper[4717]: I1002 14:48:25.337484 4717 generic.go:334] "Generic (PLEG): container finished" podID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerID="27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88" exitCode=0 Oct 02 14:48:25 crc kubenswrapper[4717]: I1002 14:48:25.337551 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvzb7" event={"ID":"ec48d790-dc88-4878-8f39-83f0cb94ab1d","Type":"ContainerDied","Data":"27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88"} Oct 02 14:48:27 crc kubenswrapper[4717]: I1002 14:48:27.356185 4717 generic.go:334] "Generic (PLEG): container finished" podID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerID="74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7" exitCode=0 Oct 02 14:48:27 crc kubenswrapper[4717]: I1002 14:48:27.356248 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvzb7" event={"ID":"ec48d790-dc88-4878-8f39-83f0cb94ab1d","Type":"ContainerDied","Data":"74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7"} Oct 02 14:48:28 crc kubenswrapper[4717]: I1002 14:48:28.367272 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvzb7" 
event={"ID":"ec48d790-dc88-4878-8f39-83f0cb94ab1d","Type":"ContainerStarted","Data":"cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a"} Oct 02 14:48:31 crc kubenswrapper[4717]: I1002 14:48:31.839114 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:48:31 crc kubenswrapper[4717]: E1002 14:48:31.839801 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:48:33 crc kubenswrapper[4717]: I1002 14:48:33.705084 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:33 crc kubenswrapper[4717]: I1002 14:48:33.705609 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:33 crc kubenswrapper[4717]: I1002 14:48:33.793716 4717 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:33 crc kubenswrapper[4717]: I1002 14:48:33.830531 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pvzb7" podStartSLOduration=8.137751964 podStartE2EDuration="10.830501659s" podCreationTimestamp="2025-10-02 14:48:23 +0000 UTC" firstStartedPulling="2025-10-02 14:48:25.340241287 +0000 UTC m=+1656.192095733" lastFinishedPulling="2025-10-02 14:48:28.032990982 +0000 UTC m=+1658.884845428" observedRunningTime="2025-10-02 14:48:28.392079069 +0000 UTC m=+1659.243933525" watchObservedRunningTime="2025-10-02 14:48:33.830501659 +0000 UTC m=+1664.682356135" Oct 02 14:48:34 crc kubenswrapper[4717]: I1002 14:48:34.472509 4717 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:37 crc kubenswrapper[4717]: I1002 14:48:37.377767 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvzb7"] Oct 02 14:48:37 crc kubenswrapper[4717]: I1002 14:48:37.378624 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pvzb7" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerName="registry-server" containerID="cri-o://cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a" gracePeriod=2 Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.429065 4717 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.449614 4717 generic.go:334] "Generic (PLEG): container finished" podID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerID="cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a" exitCode=0 Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.450101 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvzb7" event={"ID":"ec48d790-dc88-4878-8f39-83f0cb94ab1d","Type":"ContainerDied","Data":"cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a"} Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.450435 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvzb7" event={"ID":"ec48d790-dc88-4878-8f39-83f0cb94ab1d","Type":"ContainerDied","Data":"5e809c45430be63593b54eb750333de2859c8e6616c55847a9fb2460f569045b"} Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.450156 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvzb7" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.450550 4717 scope.go:117] "RemoveContainer" containerID="cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.490838 4717 scope.go:117] "RemoveContainer" containerID="74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.497880 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8zhc\" (UniqueName: \"kubernetes.io/projected/ec48d790-dc88-4878-8f39-83f0cb94ab1d-kube-api-access-m8zhc\") pod \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.498007 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-catalog-content\") pod \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.498069 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-utilities\") pod \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\" (UID: \"ec48d790-dc88-4878-8f39-83f0cb94ab1d\") " Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.499363 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-utilities" (OuterVolumeSpecName: "utilities") pod "ec48d790-dc88-4878-8f39-83f0cb94ab1d" (UID: "ec48d790-dc88-4878-8f39-83f0cb94ab1d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.509643 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec48d790-dc88-4878-8f39-83f0cb94ab1d-kube-api-access-m8zhc" (OuterVolumeSpecName: "kube-api-access-m8zhc") pod "ec48d790-dc88-4878-8f39-83f0cb94ab1d" (UID: "ec48d790-dc88-4878-8f39-83f0cb94ab1d"). InnerVolumeSpecName "kube-api-access-m8zhc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.512309 4717 scope.go:117] "RemoveContainer" containerID="27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.551219 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec48d790-dc88-4878-8f39-83f0cb94ab1d" (UID: "ec48d790-dc88-4878-8f39-83f0cb94ab1d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.562540 4717 scope.go:117] "RemoveContainer" containerID="cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a" Oct 02 14:48:38 crc kubenswrapper[4717]: E1002 14:48:38.563040 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a\": container with ID starting with cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a not found: ID does not exist" containerID="cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.563097 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a"} err="failed to get container status \"cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a\": rpc error: code = NotFound desc = could not find container \"cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a\": container with ID starting with cb9493644f14891c30a6b78b79eed3528178576ba035a41a45664e3f7715e60a not found: ID does not exist" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.563128 4717 scope.go:117] "RemoveContainer" containerID="74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7" Oct 02 14:48:38 crc kubenswrapper[4717]: E1002 14:48:38.563426 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7\": container with ID starting with 74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7 not found: ID does not exist" containerID="74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.563466 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7"} err="failed to get container status \"74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7\": rpc error: code = NotFound desc = could not find container \"74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7\": container with ID starting with 74e31c2dafc55a60a61674e8769f0f42e326dd766ac51d5d1a6228e95d4c79f7 not found: ID does not exist" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.563487 4717 scope.go:117] "RemoveContainer" containerID="27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88" Oct 02 14:48:38 crc kubenswrapper[4717]: E1002 14:48:38.563808 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88\": container with ID starting with 27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88 not found: ID does not exist" containerID="27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.563882 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88"} err="failed to get container status \"27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88\": rpc error: code = NotFound desc = could not find container \"27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88\": container with ID starting with 27583341f6b2073f3e873c7acb9d18583c3cb7ce1f8ebc8390e71b21df6b0f88 not found: ID does not exist" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.599537 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8zhc\" (UniqueName: \"kubernetes.io/projected/ec48d790-dc88-4878-8f39-83f0cb94ab1d-kube-api-access-m8zhc\") on node \"crc\" DevicePath \"\"" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.599581 4717 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.599593 4717 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec48d790-dc88-4878-8f39-83f0cb94ab1d-utilities\") on node \"crc\" DevicePath \"\"" Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.798470 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvzb7"] Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.809322 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pvzb7"] Oct 02 14:48:38 crc kubenswrapper[4717]: I1002 14:48:38.847715 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" path="/var/lib/kubelet/pods/ec48d790-dc88-4878-8f39-83f0cb94ab1d/volumes" Oct 02 14:48:42 crc kubenswrapper[4717]: I1002 14:48:42.839395 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:48:42 crc kubenswrapper[4717]: E1002 14:48:42.839967 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:48:56 crc kubenswrapper[4717]: I1002 14:48:56.843490 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:48:56 crc kubenswrapper[4717]: E1002 14:48:56.844540 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:49:08 crc kubenswrapper[4717]: I1002 14:49:08.839869 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:49:08 crc kubenswrapper[4717]: E1002 14:49:08.844429 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.176656 4717 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-hgh4x/must-gather-vvtqg"] Oct 02 14:49:12 crc kubenswrapper[4717]: E1002 14:49:12.177494 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerName="registry-server" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.177514 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerName="registry-server" Oct 02 14:49:12 crc kubenswrapper[4717]: E1002 14:49:12.177545 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerName="extract-utilities" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.177556 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerName="extract-utilities" Oct 02 14:49:12 crc kubenswrapper[4717]: E1002 14:49:12.177575 4717 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerName="extract-content" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.177583 4717 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerName="extract-content" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.177762 4717 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec48d790-dc88-4878-8f39-83f0cb94ab1d" containerName="registry-server" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.178607 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.182040 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-hgh4x"/"openshift-service-ca.crt" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.182510 4717 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-hgh4x"/"kube-root-ca.crt" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.238724 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-hgh4x/must-gather-vvtqg"] Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.357870 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5m55\" (UniqueName: \"kubernetes.io/projected/24058d7b-a548-427d-8f67-39ce7cae05e3-kube-api-access-k5m55\") pod \"must-gather-vvtqg\" (UID: \"24058d7b-a548-427d-8f67-39ce7cae05e3\") " pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.357979 4717 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/24058d7b-a548-427d-8f67-39ce7cae05e3-must-gather-output\") pod \"must-gather-vvtqg\" (UID: \"24058d7b-a548-427d-8f67-39ce7cae05e3\") " pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.459692 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5m55\" (UniqueName: \"kubernetes.io/projected/24058d7b-a548-427d-8f67-39ce7cae05e3-kube-api-access-k5m55\") pod \"must-gather-vvtqg\" (UID: \"24058d7b-a548-427d-8f67-39ce7cae05e3\") " pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.459749 4717 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/24058d7b-a548-427d-8f67-39ce7cae05e3-must-gather-output\") pod \"must-gather-vvtqg\" (UID: \"24058d7b-a548-427d-8f67-39ce7cae05e3\") " pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.460156 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/24058d7b-a548-427d-8f67-39ce7cae05e3-must-gather-output\") pod \"must-gather-vvtqg\" (UID: \"24058d7b-a548-427d-8f67-39ce7cae05e3\") " pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.478999 4717 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5m55\" (UniqueName: \"kubernetes.io/projected/24058d7b-a548-427d-8f67-39ce7cae05e3-kube-api-access-k5m55\") pod \"must-gather-vvtqg\" (UID: \"24058d7b-a548-427d-8f67-39ce7cae05e3\") " pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.511346 4717 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:49:12 crc kubenswrapper[4717]: I1002 14:49:12.747016 4717 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-hgh4x/must-gather-vvtqg"] Oct 02 14:49:13 crc kubenswrapper[4717]: I1002 14:49:13.755616 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" event={"ID":"24058d7b-a548-427d-8f67-39ce7cae05e3","Type":"ContainerStarted","Data":"3a3a574e32b8146c237469da4406663aa7dc749852cc43e59ec01b39ed38dffa"} Oct 02 14:49:19 crc kubenswrapper[4717]: I1002 14:49:19.823318 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" event={"ID":"24058d7b-a548-427d-8f67-39ce7cae05e3","Type":"ContainerStarted","Data":"13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25"} Oct 02 14:49:19 crc kubenswrapper[4717]: I1002 14:49:19.823910 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" event={"ID":"24058d7b-a548-427d-8f67-39ce7cae05e3","Type":"ContainerStarted","Data":"eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781"} Oct 02 14:49:19 crc kubenswrapper[4717]: I1002 14:49:19.845892 4717 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" podStartSLOduration=1.7326651819999999 podStartE2EDuration="7.845868711s" podCreationTimestamp="2025-10-02 14:49:12 +0000 UTC" firstStartedPulling="2025-10-02 14:49:12.789314305 +0000 UTC m=+1703.641168751" lastFinishedPulling="2025-10-02 14:49:18.902517834 +0000 UTC m=+1709.754372280" observedRunningTime="2025-10-02 14:49:19.845665675 +0000 UTC m=+1710.697520121" watchObservedRunningTime="2025-10-02 14:49:19.845868711 +0000 UTC m=+1710.697723147" Oct 02 14:49:22 crc kubenswrapper[4717]: I1002 14:49:22.838611 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:49:22 crc kubenswrapper[4717]: E1002 14:49:22.839010 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:49:34 crc kubenswrapper[4717]: I1002 14:49:34.840249 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:49:34 crc kubenswrapper[4717]: E1002 14:49:34.842571 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:49:49 crc kubenswrapper[4717]: I1002 14:49:49.839465 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:49:49 crc kubenswrapper[4717]: E1002 14:49:49.840547 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.000206 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq_25007f37-eab2-4a19-aaec-041dccf4a1fa/util/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.202332 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq_25007f37-eab2-4a19-aaec-041dccf4a1fa/util/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.216592 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq_25007f37-eab2-4a19-aaec-041dccf4a1fa/pull/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.223296 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq_25007f37-eab2-4a19-aaec-041dccf4a1fa/pull/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.403561 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq_25007f37-eab2-4a19-aaec-041dccf4a1fa/pull/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.410302 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq_25007f37-eab2-4a19-aaec-041dccf4a1fa/extract/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.423238 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_24f71a75347ad7a5f841d153f802ac72a45ac2356b85254a24ab5c9f58mhmrq_25007f37-eab2-4a19-aaec-041dccf4a1fa/util/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.615746 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8_f091692c-789e-45e1-aa38-a06ad59db093/util/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.748585 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8_f091692c-789e-45e1-aa38-a06ad59db093/util/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.750127 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8_f091692c-789e-45e1-aa38-a06ad59db093/pull/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.807837 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8_f091692c-789e-45e1-aa38-a06ad59db093/pull/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.952516 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8_f091692c-789e-45e1-aa38-a06ad59db093/util/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.952786 4717 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8_f091692c-789e-45e1-aa38-a06ad59db093/pull/0.log" Oct 02 14:49:55 crc kubenswrapper[4717]: I1002 14:49:55.976353 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_33f96395a4977d5ccf9ed0afcd30549f9ec17f38bc1066e9c5c7a4f3164dcn8_f091692c-789e-45e1-aa38-a06ad59db093/extract/0.log" Oct 02 14:49:56 crc kubenswrapper[4717]: I1002 14:49:56.162774 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv_9e697dbf-f458-4f6f-83af-57ef6086b720/util/0.log" Oct 02 14:49:56 crc kubenswrapper[4717]: I1002 14:49:56.331054 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv_9e697dbf-f458-4f6f-83af-57ef6086b720/util/0.log" Oct 02 14:49:56 crc kubenswrapper[4717]: I1002 14:49:56.385793 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv_9e697dbf-f458-4f6f-83af-57ef6086b720/pull/0.log" Oct 02 14:49:56 crc kubenswrapper[4717]: I1002 14:49:56.414542 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv_9e697dbf-f458-4f6f-83af-57ef6086b720/pull/0.log" Oct 02 14:49:56 crc kubenswrapper[4717]: I1002 14:49:56.743056 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv_9e697dbf-f458-4f6f-83af-57ef6086b720/extract/0.log" Oct 02 14:49:56 crc kubenswrapper[4717]: I1002 14:49:56.811795 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv_9e697dbf-f458-4f6f-83af-57ef6086b720/util/0.log" Oct 02 14:49:56 crc kubenswrapper[4717]: I1002 14:49:56.818173 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bfb89b7a15e902ec1ce651098a1cbdcb0a2281c38e30d9a342b952813rsndv_9e697dbf-f458-4f6f-83af-57ef6086b720/pull/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.018034 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc_0a5398f8-f537-4676-a305-844c9975fb95/util/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.214136 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc_0a5398f8-f537-4676-a305-844c9975fb95/pull/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.221655 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc_0a5398f8-f537-4676-a305-844c9975fb95/pull/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.243293 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc_0a5398f8-f537-4676-a305-844c9975fb95/util/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.420605 4717 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc_0a5398f8-f537-4676-a305-844c9975fb95/util/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.453181 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc_0a5398f8-f537-4676-a305-844c9975fb95/pull/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.506542 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5902cqqc_0a5398f8-f537-4676-a305-844c9975fb95/extract/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.626161 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx_d61faefb-44b1-49a1-968c-48bf323a6c54/util/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.768652 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx_d61faefb-44b1-49a1-968c-48bf323a6c54/util/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.808926 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx_d61faefb-44b1-49a1-968c-48bf323a6c54/pull/0.log" Oct 02 14:49:57 crc kubenswrapper[4717]: I1002 14:49:57.826881 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx_d61faefb-44b1-49a1-968c-48bf323a6c54/pull/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.014920 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx_d61faefb-44b1-49a1-968c-48bf323a6c54/util/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.015357 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx_d61faefb-44b1-49a1-968c-48bf323a6c54/pull/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.047276 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_c77910536a79801a83f49d4fd4581e5a2972791dfc31ed0ea9f0ffea32l4dcx_d61faefb-44b1-49a1-968c-48bf323a6c54/extract/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.095096 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn_13be3ae0-c85c-4ff4-937a-fdc536f9e99a/util/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.266890 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn_13be3ae0-c85c-4ff4-937a-fdc536f9e99a/pull/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.279579 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn_13be3ae0-c85c-4ff4-937a-fdc536f9e99a/util/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.289117 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn_13be3ae0-c85c-4ff4-937a-fdc536f9e99a/pull/0.log" Oct 02 14:49:58 
crc kubenswrapper[4717]: I1002 14:49:58.470771 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn_13be3ae0-c85c-4ff4-937a-fdc536f9e99a/util/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.510043 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn_13be3ae0-c85c-4ff4-937a-fdc536f9e99a/extract/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.512075 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2_29ecebf2-e728-4f9c-8f0b-060eda32da1e/util/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.541598 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_d93b99dddc714b0f4b2148f40016b9ead21cc18743d58ffe812e1bd436l85gn_13be3ae0-c85c-4ff4-937a-fdc536f9e99a/pull/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.707312 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2_29ecebf2-e728-4f9c-8f0b-060eda32da1e/pull/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.726372 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2_29ecebf2-e728-4f9c-8f0b-060eda32da1e/util/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.758335 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2_29ecebf2-e728-4f9c-8f0b-060eda32da1e/pull/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.962102 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2_29ecebf2-e728-4f9c-8f0b-060eda32da1e/util/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.974509 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2_29ecebf2-e728-4f9c-8f0b-060eda32da1e/pull/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.975296 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ec20a04ef7278338c96ca90950ec47944973b8553e1da5c6f2ce730402v4lb2_29ecebf2-e728-4f9c-8f0b-060eda32da1e/extract/0.log" Oct 02 14:49:58 crc kubenswrapper[4717]: I1002 14:49:58.981174 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-6677765f49-4gg6t_e0b9d395-74ea-4b6e-9700-07e464512c7e/kube-rbac-proxy/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.193498 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-index-fmxm2_ec6951a5-2a08-4902-8a9b-3dd3a43bce05/registry-server/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.257805 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-6677765f49-4gg6t_e0b9d395-74ea-4b6e-9700-07e464512c7e/manager/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.300789 4717 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-664b44f579-nkj7h_437a77e2-6cb4-4075-b0fa-1f8922bd2c76/kube-rbac-proxy/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.398661 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-664b44f579-nkj7h_437a77e2-6cb4-4075-b0fa-1f8922bd2c76/manager/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.417573 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-index-br82z_08984fe8-ca53-44ac-8958-0ea63894ff61/registry-server/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.521672 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7595d7c777-4hnct_882db0a2-0e31-4202-8286-9435da5165a9/kube-rbac-proxy/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.643547 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7595d7c777-4hnct_882db0a2-0e31-4202-8286-9435da5165a9/manager/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.645747 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-index-t8wzp_0e56647e-922d-435a-a84d-1c1910c2391f/registry-server/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.717446 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-6bc9db746f-mv89l_08448dc6-f974-428d-81a3-d205e812f0ee/kube-rbac-proxy/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.908910 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-6bc9db746f-mv89l_08448dc6-f974-428d-81a3-d205e812f0ee/manager/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.924118 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-index-8shh4_b03c8168-fa0b-4435-8216-08f4f71d90a7/registry-server/0.log" Oct 02 14:49:59 crc kubenswrapper[4717]: I1002 14:49:59.956032 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-7b6fb6cd76-5ddtr_85760b2d-ab19-40a7-91a2-b9266f9ab4d1/kube-rbac-proxy/0.log" Oct 02 14:50:00 crc kubenswrapper[4717]: I1002 14:50:00.057476 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-7b6fb6cd76-5ddtr_85760b2d-ab19-40a7-91a2-b9266f9ab4d1/manager/0.log" Oct 02 14:50:00 crc kubenswrapper[4717]: I1002 14:50:00.143881 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-index-8wntw_4db284ef-7569-4a82-b4f5-b49b66745a31/registry-server/0.log" Oct 02 14:50:00 crc kubenswrapper[4717]: I1002 14:50:00.172209 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-779fc9694b-t85n5_4848f26f-88d7-4cf6-b271-f419c946be0f/operator/0.log" Oct 02 14:50:00 crc kubenswrapper[4717]: I1002 14:50:00.301996 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-index-lck67_c2ce2ae9-a067-4415-9152-182a699d3772/registry-server/0.log" Oct 02 14:50:00 crc kubenswrapper[4717]: I1002 14:50:00.372314 4717 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7cf97c46f6-tw6bc_19ac5c12-8b42-4f73-902a-dfcc557f8054/kube-rbac-proxy/0.log" Oct 02 14:50:00 crc kubenswrapper[4717]: I1002 14:50:00.414569 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-7cf97c46f6-tw6bc_19ac5c12-8b42-4f73-902a-dfcc557f8054/manager/0.log" Oct 02 14:50:00 crc kubenswrapper[4717]: I1002 14:50:00.547205 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-index-w94s9_60443082-f42b-4c42-b976-eaccbfeaabfc/registry-server/0.log" Oct 02 14:50:00 crc kubenswrapper[4717]: I1002 14:50:00.844620 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:50:00 crc kubenswrapper[4717]: E1002 14:50:00.845036 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:50:14 crc kubenswrapper[4717]: I1002 14:50:14.335559 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-zr8w6_a1756fd2-89da-4978-9a32-2201fbff47ee/control-plane-machine-set-operator/0.log" Oct 02 14:50:14 crc kubenswrapper[4717]: I1002 14:50:14.507209 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-d6qrw_95017b18-3508-46ed-a3a7-a6834d5ada15/kube-rbac-proxy/0.log" Oct 02 14:50:14 crc kubenswrapper[4717]: I1002 14:50:14.526190 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-d6qrw_95017b18-3508-46ed-a3a7-a6834d5ada15/machine-api-operator/0.log" Oct 02 14:50:14 crc kubenswrapper[4717]: I1002 14:50:14.839489 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:50:14 crc kubenswrapper[4717]: E1002 14:50:14.839735 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:50:28 crc kubenswrapper[4717]: I1002 14:50:28.840575 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:50:28 crc kubenswrapper[4717]: E1002 14:50:28.842316 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:50:30 crc kubenswrapper[4717]: I1002 14:50:30.995460 4717 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-68d546b9d8-jj4xv_a23b49f3-2a36-424a-a9d3-7f3a2868ede2/kube-rbac-proxy/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.062261 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-jj4xv_a23b49f3-2a36-424a-a9d3-7f3a2868ede2/controller/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.190047 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-frr-files/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.328001 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-reloader/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.328056 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-frr-files/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.372913 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-reloader/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.398576 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-metrics/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.579895 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-metrics/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.616796 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-frr-files/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.627582 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-reloader/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.657999 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-metrics/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.858660 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-reloader/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.881677 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/controller/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.884134 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-frr-files/0.log" Oct 02 14:50:31 crc kubenswrapper[4717]: I1002 14:50:31.886286 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/cp-metrics/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.075846 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/frr-metrics/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.098344 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/kube-rbac-proxy/0.log" Oct 02 14:50:32 crc 
kubenswrapper[4717]: I1002 14:50:32.099321 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/kube-rbac-proxy-frr/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.326190 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/reloader/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.354004 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-29hc8_0d083165-db97-4223-a0ea-808e1d3501de/frr-k8s-webhook-server/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.500841 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-ql5c8_cfa1a1ce-0405-488f-91e9-f12ccfd2636c/frr/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.562012 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7bfd4cf5df-b9zdk_ab57e351-5255-44f7-a345-ce572861d96c/manager/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.699854 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5d64c8548f-x44sv_cca01e6e-5d15-486b-8c28-3f21c54fa045/webhook-server/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.764330 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dwj7m_db72cefb-692a-456b-8326-073a98c6f7a7/kube-rbac-proxy/0.log" Oct 02 14:50:32 crc kubenswrapper[4717]: I1002 14:50:32.909400 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-dwj7m_db72cefb-692a-456b-8326-073a98c6f7a7/speaker/0.log" Oct 02 14:50:41 crc kubenswrapper[4717]: I1002 14:50:41.839171 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:50:41 crc kubenswrapper[4717]: E1002 14:50:41.840052 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.104847 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-7b32-account-create-s29lb_9556b356-4c0a-4d9d-9ff8-c949f3926434/mariadb-account-create/0.log" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.270184 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-db-create-ml46j_c56ce587-1ba8-401e-9238-30f2216e0aee/mariadb-database-create/0.log" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.348831 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-db-sync-7q558_b6b08c0c-8c1e-43aa-8457-5ea7f6641148/glance-db-sync/0.log" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.462116 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_01f9e583-3cac-4016-a4a3-3b5107a0c990/glance-api/0.log" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.531617 4717 log.go:25] "Finished parsing log file" 
path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_01f9e583-3cac-4016-a4a3-3b5107a0c990/glance-httpd/0.log" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.571232 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_01f9e583-3cac-4016-a4a3-3b5107a0c990/glance-log/0.log" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.654440 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_f7e7b28c-2adf-4d68-a97c-683603dec324/glance-api/0.log" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.728273 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_f7e7b28c-2adf-4d68-a97c-683603dec324/glance-httpd/0.log" Oct 02 14:50:48 crc kubenswrapper[4717]: I1002 14:50:48.778203 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_f7e7b28c-2adf-4d68-a97c-683603dec324/glance-log/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.131105 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_memcached-0_c19128ae-cc4b-47e3-a572-88003d524ea2/memcached/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.144757 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_keystone-79f8987b94-jwn49_53a403d0-5b76-48a7-8992-55c1b84b0d8e/keystone-api/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.188683 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_5de254a6-3038-4865-b5b3-2efd0b6fe371/mysql-bootstrap/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.433965 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_5de254a6-3038-4865-b5b3-2efd0b6fe371/mysql-bootstrap/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.443259 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_27e47c56-7639-484b-a693-b8cb67491d57/mysql-bootstrap/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.455709 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_5de254a6-3038-4865-b5b3-2efd0b6fe371/galera/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.649530 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_27e47c56-7639-484b-a693-b8cb67491d57/mysql-bootstrap/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.665828 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_27e47c56-7639-484b-a693-b8cb67491d57/galera/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.722474 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_50f14d7b-2533-4238-ae56-1416dd65e626/mysql-bootstrap/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.956809 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_50f14d7b-2533-4238-ae56-1416dd65e626/galera/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.961995 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_50f14d7b-2533-4238-ae56-1416dd65e626/mysql-bootstrap/0.log" Oct 02 14:50:49 crc kubenswrapper[4717]: I1002 14:50:49.986538 
4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstackclient_10372fe8-c834-4699-825d-4fef6b48cfc1/openstackclient/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.190328 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_6c8e897f-fef0-42ff-a151-6323550dcab0/setup-container/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.367816 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_6c8e897f-fef0-42ff-a151-6323550dcab0/setup-container/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.401827 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_6c8e897f-fef0-42ff-a151-6323550dcab0/rabbitmq/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.424759 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-proxy-59cb459c9f-krwjd_33b2243b-a2fc-4332-b6c3-c4c0af731c8c/proxy-httpd/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.537445 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-proxy-59cb459c9f-krwjd_33b2243b-a2fc-4332-b6c3-c4c0af731c8c/proxy-server/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.590925 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-ring-rebalance-tcfzv_4d807c62-a7f6-43c8-bd1d-826a8bb17e0d/swift-ring-rebalance/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.771903 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/account-replicator/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.778183 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/account-auditor/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.779012 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/account-reaper/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.850100 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/account-server/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.948146 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/container-auditor/0.log" Oct 02 14:50:50 crc kubenswrapper[4717]: I1002 14:50:50.977174 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/container-replicator/0.log" Oct 02 14:50:51 crc kubenswrapper[4717]: I1002 14:50:51.068691 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/container-updater/0.log" Oct 02 14:50:51 crc kubenswrapper[4717]: I1002 14:50:51.088254 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/container-server/0.log" Oct 02 14:50:51 crc kubenswrapper[4717]: I1002 14:50:51.275694 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/object-auditor/0.log" Oct 02 14:50:51 crc 
kubenswrapper[4717]: I1002 14:50:51.301993 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/object-expirer/0.log" Oct 02 14:50:51 crc kubenswrapper[4717]: I1002 14:50:51.397491 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/object-updater/0.log" Oct 02 14:50:51 crc kubenswrapper[4717]: I1002 14:50:51.400792 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/object-replicator/0.log" Oct 02 14:50:51 crc kubenswrapper[4717]: I1002 14:50:51.408813 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/object-server/0.log" Oct 02 14:50:51 crc kubenswrapper[4717]: I1002 14:50:51.481236 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/rsync/0.log" Oct 02 14:50:51 crc kubenswrapper[4717]: I1002 14:50:51.509713 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_a4258a20-8978-4f2a-bb99-793fe396938c/swift-recon-cron/0.log" Oct 02 14:50:55 crc kubenswrapper[4717]: I1002 14:50:55.042793 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-ml46j"] Oct 02 14:50:55 crc kubenswrapper[4717]: I1002 14:50:55.049279 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-ml46j"] Oct 02 14:50:55 crc kubenswrapper[4717]: I1002 14:50:55.839324 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:50:55 crc kubenswrapper[4717]: E1002 14:50:55.839894 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:50:56 crc kubenswrapper[4717]: I1002 14:50:56.848111 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c56ce587-1ba8-401e-9238-30f2216e0aee" path="/var/lib/kubelet/pods/c56ce587-1ba8-401e-9238-30f2216e0aee/volumes" Oct 02 14:51:04 crc kubenswrapper[4717]: I1002 14:51:04.712324 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_8d3a60d1-ca33-4f9e-9499-c7933449692b/util/0.log" Oct 02 14:51:04 crc kubenswrapper[4717]: I1002 14:51:04.947517 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_8d3a60d1-ca33-4f9e-9499-c7933449692b/pull/0.log" Oct 02 14:51:04 crc kubenswrapper[4717]: I1002 14:51:04.955597 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_8d3a60d1-ca33-4f9e-9499-c7933449692b/pull/0.log" Oct 02 14:51:04 crc kubenswrapper[4717]: I1002 14:51:04.989691 4717 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_8d3a60d1-ca33-4f9e-9499-c7933449692b/util/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.025382 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-7b32-account-create-s29lb"] Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.034206 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-7b32-account-create-s29lb"] Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.104285 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_8d3a60d1-ca33-4f9e-9499-c7933449692b/util/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.125398 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_8d3a60d1-ca33-4f9e-9499-c7933449692b/extract/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.170179 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2q7nc9_8d3a60d1-ca33-4f9e-9499-c7933449692b/pull/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.320832 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdghh_f9960bc8-e961-4f52-bb65-5b59f0858a5c/extract-utilities/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.455310 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdghh_f9960bc8-e961-4f52-bb65-5b59f0858a5c/extract-utilities/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.474038 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdghh_f9960bc8-e961-4f52-bb65-5b59f0858a5c/extract-content/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.512310 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdghh_f9960bc8-e961-4f52-bb65-5b59f0858a5c/extract-content/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.646062 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdghh_f9960bc8-e961-4f52-bb65-5b59f0858a5c/extract-utilities/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.663278 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdghh_f9960bc8-e961-4f52-bb65-5b59f0858a5c/extract-content/0.log" Oct 02 14:51:05 crc kubenswrapper[4717]: I1002 14:51:05.858724 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-87wh7_f44d4e10-b4a4-4d65-8c76-7907c8a5d882/extract-utilities/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.108200 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-87wh7_f44d4e10-b4a4-4d65-8c76-7907c8a5d882/extract-content/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.113319 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-87wh7_f44d4e10-b4a4-4d65-8c76-7907c8a5d882/extract-content/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.144951 4717 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-87wh7_f44d4e10-b4a4-4d65-8c76-7907c8a5d882/extract-utilities/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.221856 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-rdghh_f9960bc8-e961-4f52-bb65-5b59f0858a5c/registry-server/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.306900 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-87wh7_f44d4e10-b4a4-4d65-8c76-7907c8a5d882/extract-utilities/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.497474 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-87wh7_f44d4e10-b4a4-4d65-8c76-7907c8a5d882/extract-content/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.732554 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-vxzvh_83fecab7-2ae7-4bf6-88d5-7233871a02bc/marketplace-operator/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.848700 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9556b356-4c0a-4d9d-9ff8-c949f3926434" path="/var/lib/kubelet/pods/9556b356-4c0a-4d9d-9ff8-c949f3926434/volumes" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.895226 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-87wh7_f44d4e10-b4a4-4d65-8c76-7907c8a5d882/registry-server/0.log" Oct 02 14:51:06 crc kubenswrapper[4717]: I1002 14:51:06.918106 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-cf592_942f28a4-5dee-444a-8528-1794e832be15/extract-utilities/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.081960 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-cf592_942f28a4-5dee-444a-8528-1794e832be15/extract-utilities/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.086770 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-cf592_942f28a4-5dee-444a-8528-1794e832be15/extract-content/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.088948 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-cf592_942f28a4-5dee-444a-8528-1794e832be15/extract-content/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.276880 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-cf592_942f28a4-5dee-444a-8528-1794e832be15/extract-utilities/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.319003 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-cf592_942f28a4-5dee-444a-8528-1794e832be15/extract-content/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.442690 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-cf592_942f28a4-5dee-444a-8528-1794e832be15/registry-server/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.480536 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wl69_8f70b22a-c207-48df-90fc-ea87f232da17/extract-utilities/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.655902 4717 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wl69_8f70b22a-c207-48df-90fc-ea87f232da17/extract-utilities/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.718038 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wl69_8f70b22a-c207-48df-90fc-ea87f232da17/extract-content/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.718637 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wl69_8f70b22a-c207-48df-90fc-ea87f232da17/extract-content/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.839578 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:51:07 crc kubenswrapper[4717]: E1002 14:51:07.840000 4717 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-sk55f_openshift-machine-config-operator(405aba30-0ff3-4fca-a5da-09c35263665d)\"" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.871874 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wl69_8f70b22a-c207-48df-90fc-ea87f232da17/extract-content/0.log" Oct 02 14:51:07 crc kubenswrapper[4717]: I1002 14:51:07.968513 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wl69_8f70b22a-c207-48df-90fc-ea87f232da17/extract-utilities/0.log" Oct 02 14:51:08 crc kubenswrapper[4717]: I1002 14:51:08.379419 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-2wl69_8f70b22a-c207-48df-90fc-ea87f232da17/registry-server/0.log" Oct 02 14:51:13 crc kubenswrapper[4717]: I1002 14:51:13.032594 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-7q558"] Oct 02 14:51:13 crc kubenswrapper[4717]: I1002 14:51:13.037855 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-sync-7q558"] Oct 02 14:51:14 crc kubenswrapper[4717]: I1002 14:51:14.849033 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6b08c0c-8c1e-43aa-8457-5ea7f6641148" path="/var/lib/kubelet/pods/b6b08c0c-8c1e-43aa-8457-5ea7f6641148/volumes" Oct 02 14:51:21 crc kubenswrapper[4717]: I1002 14:51:21.839925 4717 scope.go:117] "RemoveContainer" containerID="3b6c2379201d3325ca0fea62c549025fa6b088042dec13f0888fa2ed65d8298a" Oct 02 14:51:22 crc kubenswrapper[4717]: I1002 14:51:22.811884 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" event={"ID":"405aba30-0ff3-4fca-a5da-09c35263665d","Type":"ContainerStarted","Data":"c0c61cacaf3f9f6c4c370b716ca52b3950c97d66267669da01813761c4bbd5ce"} Oct 02 14:51:56 crc kubenswrapper[4717]: I1002 14:51:56.785279 4717 scope.go:117] "RemoveContainer" containerID="5d90516f3c568f0109ffce2790fb5379ae9ce82b61f6f6fe0143ee7b84d8e340" Oct 02 14:51:56 crc kubenswrapper[4717]: I1002 14:51:56.808055 4717 scope.go:117] "RemoveContainer" containerID="7d58c622862576531efaca91b645b90751c39f211c96f1d8c586fb31e9300999" Oct 02 14:51:56 crc kubenswrapper[4717]: I1002 14:51:56.876422 4717 scope.go:117] "RemoveContainer" 
containerID="322d84879e25aea6243774b05bb394d9da0ffc318606cac21f88fd34e3e39fa8" Oct 02 14:52:15 crc kubenswrapper[4717]: I1002 14:52:15.254238 4717 generic.go:334] "Generic (PLEG): container finished" podID="24058d7b-a548-427d-8f67-39ce7cae05e3" containerID="eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781" exitCode=0 Oct 02 14:52:15 crc kubenswrapper[4717]: I1002 14:52:15.254330 4717 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" event={"ID":"24058d7b-a548-427d-8f67-39ce7cae05e3","Type":"ContainerDied","Data":"eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781"} Oct 02 14:52:15 crc kubenswrapper[4717]: I1002 14:52:15.256030 4717 scope.go:117] "RemoveContainer" containerID="eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781" Oct 02 14:52:16 crc kubenswrapper[4717]: I1002 14:52:16.028433 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-hgh4x_must-gather-vvtqg_24058d7b-a548-427d-8f67-39ce7cae05e3/gather/0.log" Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.205191 4717 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-hgh4x/must-gather-vvtqg"] Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.206423 4717 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" podUID="24058d7b-a548-427d-8f67-39ce7cae05e3" containerName="copy" containerID="cri-o://13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25" gracePeriod=2 Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.212468 4717 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-hgh4x/must-gather-vvtqg"] Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.635429 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-hgh4x_must-gather-vvtqg_24058d7b-a548-427d-8f67-39ce7cae05e3/copy/0.log" Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.636550 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.681856 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5m55\" (UniqueName: \"kubernetes.io/projected/24058d7b-a548-427d-8f67-39ce7cae05e3-kube-api-access-k5m55\") pod \"24058d7b-a548-427d-8f67-39ce7cae05e3\" (UID: \"24058d7b-a548-427d-8f67-39ce7cae05e3\") " Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.681984 4717 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/24058d7b-a548-427d-8f67-39ce7cae05e3-must-gather-output\") pod \"24058d7b-a548-427d-8f67-39ce7cae05e3\" (UID: \"24058d7b-a548-427d-8f67-39ce7cae05e3\") " Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.689741 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24058d7b-a548-427d-8f67-39ce7cae05e3-kube-api-access-k5m55" (OuterVolumeSpecName: "kube-api-access-k5m55") pod "24058d7b-a548-427d-8f67-39ce7cae05e3" (UID: "24058d7b-a548-427d-8f67-39ce7cae05e3"). InnerVolumeSpecName "kube-api-access-k5m55". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.772022 4717 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24058d7b-a548-427d-8f67-39ce7cae05e3-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "24058d7b-a548-427d-8f67-39ce7cae05e3" (UID: "24058d7b-a548-427d-8f67-39ce7cae05e3"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.784364 4717 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/24058d7b-a548-427d-8f67-39ce7cae05e3-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 02 14:52:23 crc kubenswrapper[4717]: I1002 14:52:23.784446 4717 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5m55\" (UniqueName: \"kubernetes.io/projected/24058d7b-a548-427d-8f67-39ce7cae05e3-kube-api-access-k5m55\") on node \"crc\" DevicePath \"\"" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.344860 4717 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-hgh4x_must-gather-vvtqg_24058d7b-a548-427d-8f67-39ce7cae05e3/copy/0.log" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.345356 4717 generic.go:334] "Generic (PLEG): container finished" podID="24058d7b-a548-427d-8f67-39ce7cae05e3" containerID="13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25" exitCode=143 Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.345433 4717 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-hgh4x/must-gather-vvtqg" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.345442 4717 scope.go:117] "RemoveContainer" containerID="13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.364721 4717 scope.go:117] "RemoveContainer" containerID="eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.434090 4717 scope.go:117] "RemoveContainer" containerID="13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25" Oct 02 14:52:24 crc kubenswrapper[4717]: E1002 14:52:24.434687 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25\": container with ID starting with 13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25 not found: ID does not exist" containerID="13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.434762 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25"} err="failed to get container status \"13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25\": rpc error: code = NotFound desc = could not find container \"13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25\": container with ID starting with 13f480f93be9cd3049eadebb5168ce14cd9393ce00bee14fdb0d2d0a76c1ba25 not found: ID does not exist" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.434804 4717 scope.go:117] "RemoveContainer" containerID="eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781" Oct 02 14:52:24 crc 
kubenswrapper[4717]: E1002 14:52:24.435218 4717 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781\": container with ID starting with eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781 not found: ID does not exist" containerID="eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.435262 4717 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781"} err="failed to get container status \"eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781\": rpc error: code = NotFound desc = could not find container \"eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781\": container with ID starting with eb797f1ee7168ed337281609da34adb029daf7cb5fbbb9681f1d762072c11781 not found: ID does not exist" Oct 02 14:52:24 crc kubenswrapper[4717]: I1002 14:52:24.851649 4717 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24058d7b-a548-427d-8f67-39ce7cae05e3" path="/var/lib/kubelet/pods/24058d7b-a548-427d-8f67-39ce7cae05e3/volumes" Oct 02 14:53:48 crc kubenswrapper[4717]: I1002 14:53:48.620268 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:53:48 crc kubenswrapper[4717]: I1002 14:53:48.622568 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 02 14:54:18 crc kubenswrapper[4717]: I1002 14:54:18.621521 4717 patch_prober.go:28] interesting pod/machine-config-daemon-sk55f container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 02 14:54:18 crc kubenswrapper[4717]: I1002 14:54:18.622149 4717 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-sk55f" podUID="405aba30-0ff3-4fca-a5da-09c35263665d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"